lang
stringclasses
1 value
license
stringclasses
13 values
stderr
stringlengths
0
350
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
7
45.1k
new_contents
stringlengths
0
1.87M
new_file
stringlengths
6
292
old_contents
stringlengths
0
1.87M
message
stringlengths
6
9.26k
old_file
stringlengths
6
292
subject
stringlengths
0
4.45k
Java
apache-2.0
3c1d58c6e3626a5613be812a807bbd7a00b29ff4
0
davido/gerrit-reviewers-plugin,GerritCodeReview/plugins_reviewers,GerritCodeReview/plugins_reviewers,davido/gerrit-reviewers-plugin,GerritCodeReview/plugins_reviewers
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.googlesource.gerrit.plugins.reviewers;

import com.google.common.base.Objects;
import com.google.gerrit.common.ChangeHooks;
import com.google.gerrit.extensions.annotations.PluginName;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.ResourceNotFoundException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.MetaDataUpdate;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectResource;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import com.googlesource.gerrit.plugins.reviewers.PutReviewers.Input;

import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.lib.ObjectId;

import java.io.IOException;
import java.util.List;

/**
 * REST endpoint that adds a reviewer entry to, or removes one from, this
 * plugin's {@code reviewers.config} stored on the project's
 * {@code refs/meta/config} branch, and returns the resulting filter sections.
 */
@Singleton
class PutReviewers implements RestModifyView<ProjectResource, Input> {
  /** Request body: the action to perform and the filter/reviewer pair. */
  public static class Input {
    public Action action;
    public String filter;
    public String reviewer;
  }

  private final String pluginName;
  private final ReviewersConfig.Factory configFactory;
  private final MetaDataUpdate.User metaDataUpdateFactory;
  private final ProjectCache projectCache;
  private final Provider<CurrentUser> currentUser;
  private final ChangeHooks hooks;

  @Inject
  PutReviewers(@PluginName String pluginName,
      ReviewersConfig.Factory configFactory,
      MetaDataUpdate.User metaDataUpdateFactory,
      ProjectCache projectCache,
      ChangeHooks hooks,
      Provider<CurrentUser> currentUser) {
    this.pluginName = pluginName;
    this.configFactory = configFactory;
    this.metaDataUpdateFactory = metaDataUpdateFactory;
    this.projectCache = projectCache;
    this.hooks = hooks;
    this.currentUser = currentUser;
  }

  /**
   * Applies the requested add/remove to the project's reviewers config.
   *
   * @param rsrc the project whose configuration is modified
   * @param input action plus filter/reviewer pair to add or remove
   * @return the project's reviewer filter sections after the update
   * @throws ResourceNotFoundException if the caller is not a project owner,
   *     the config cannot be created, or the repository is missing
   * @throws ResourceConflictException if the config cannot be read or
   *     committed
   */
  @Override
  public List<ReviewerFilterSection> apply(ProjectResource rsrc, Input input)
      throws RestApiException {
    Project.NameKey projectName = rsrc.getNameKey();
    ReviewersConfig cfg = configFactory.create(projectName);
    // Answer "not found" for both a missing config and a non-owner caller,
    // so non-owners cannot probe whether the config exists.
    if (!rsrc.getControl().isOwner() || cfg == null) {
      throw new ResourceNotFoundException(projectName.get());
    }
    MetaDataUpdate md;
    try {
      md = metaDataUpdateFactory.create(projectName);
    } catch (RepositoryNotFoundException notFound) {
      throw new ResourceNotFoundException(projectName.get());
    } catch (IOException e) {
      throw new ResourceNotFoundException(projectName.get(), e);
    }
    try {
      cfg.load(md);
      if (input.action == Action.ADD) {
        cfg.addReviewer(input.filter, input.reviewer);
      } else {
        cfg.removeReviewer(input.filter, input.reviewer);
      }
      md.setMessage("Modify reviewers.config\n");
      try {
        ObjectId baseRev = cfg.getRevision();
        ObjectId commitRev = cfg.commit(md);
        // Only fire hook if project was actually changed.
        if (!Objects.equal(baseRev, commitRev)) {
          IdentifiedUser user = (IdentifiedUser) currentUser.get();
          hooks.doRefUpdatedHook(
              new Branch.NameKey(projectName, RefNames.REFS_CONFIG),
              baseRev, commitRev, user.getAccount());
        }
        projectCache.evict(projectName);
      } catch (IOException e) {
        // Preserve the underlying cause so the failure is diagnosable
        // (previously the cause was dropped on both paths).
        if (e.getCause() instanceof ConfigInvalidException) {
          throw new ResourceConflictException("Cannot update " + projectName
              + ": " + e.getCause().getMessage(), e);
        } else {
          throw new ResourceConflictException("Cannot update " + projectName,
              e);
        }
      }
    } catch (ConfigInvalidException err) {
      throw new ResourceConflictException("Cannot read " + pluginName
          + " configurations for project " + projectName, err);
    } catch (IOException err) {
      throw new ResourceConflictException("Cannot update " + pluginName
          + " configurations for project " + projectName, err);
    } finally {
      md.close();
    }
    return cfg.getReviewerFilterSections();
  }
}
src/main/java/com/googlesource/gerrit/plugins/reviewers/PutReviewers.java
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.googlesource.gerrit.plugins.reviewers;

import com.google.common.base.Objects;
import com.google.gerrit.common.ChangeHooks;
import com.google.gerrit.extensions.annotations.PluginName;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.ResourceNotFoundException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.MetaDataUpdate;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectResource;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import com.googlesource.gerrit.plugins.reviewers.PutReviewers.Input;

import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.lib.ObjectId;

import java.io.IOException;
import java.util.List;

/**
 * REST endpoint that adds a reviewer entry to, or removes one from, this
 * plugin's {@code reviewers.config} stored on the project's
 * {@code refs/meta/config} branch, and returns the resulting filter sections.
 */
@Singleton
class PutReviewers implements RestModifyView<ProjectResource, Input> {
  /** Request body: the action to perform and the filter/reviewer pair. */
  public static class Input {
    public Action action;
    public String filter;
    public String reviewer;
  }

  private final String pluginName;
  private final ReviewersConfig.Factory configFactory;
  private final MetaDataUpdate.User metaDataUpdateFactory;
  private final ProjectCache projectCache;
  private final Provider<CurrentUser> currentUser;
  private final ChangeHooks hooks;

  @Inject
  PutReviewers(@PluginName String pluginName,
      ReviewersConfig.Factory configFactory,
      MetaDataUpdate.User metaDataUpdateFactory,
      ProjectCache projectCache,
      ChangeHooks hooks,
      Provider<CurrentUser> currentUser) {
    this.pluginName = pluginName;
    this.configFactory = configFactory;
    this.metaDataUpdateFactory = metaDataUpdateFactory;
    this.projectCache = projectCache;
    this.hooks = hooks;
    this.currentUser = currentUser;
  }

  /**
   * Applies the requested add/remove to the project's reviewers config.
   *
   * <p>The throws clause is narrowed from the former
   * {@code AuthException, BadRequestException, ResourceConflictException,
   * Exception} to the common supertype {@link RestApiException}: every
   * exception actually thrown here is a {@code RestApiException}, and
   * declaring raw {@code Exception} forced callers to over-catch. Narrowing
   * is backward-compatible for all callers.
   *
   * @param rsrc the project whose configuration is modified
   * @param input action plus filter/reviewer pair to add or remove
   * @return the project's reviewer filter sections after the update
   * @throws ResourceNotFoundException if the caller is not a project owner,
   *     the config cannot be created, or the repository is missing
   * @throws ResourceConflictException if the config cannot be read or
   *     committed
   */
  @Override
  public List<ReviewerFilterSection> apply(ProjectResource rsrc, Input input)
      throws RestApiException {
    Project.NameKey projectName = rsrc.getNameKey();
    ReviewersConfig cfg = configFactory.create(projectName);
    // Answer "not found" for both a missing config and a non-owner caller,
    // so non-owners cannot probe whether the config exists.
    if (!rsrc.getControl().isOwner() || cfg == null) {
      throw new ResourceNotFoundException(projectName.get());
    }
    MetaDataUpdate md;
    try {
      md = metaDataUpdateFactory.create(projectName);
    } catch (RepositoryNotFoundException notFound) {
      throw new ResourceNotFoundException(projectName.get());
    } catch (IOException e) {
      throw new ResourceNotFoundException(projectName.get(), e);
    }
    try {
      cfg.load(md);
      if (input.action == Action.ADD) {
        cfg.addReviewer(input.filter, input.reviewer);
      } else {
        cfg.removeReviewer(input.filter, input.reviewer);
      }
      md.setMessage("Modify reviewers.config\n");
      try {
        ObjectId baseRev = cfg.getRevision();
        ObjectId commitRev = cfg.commit(md);
        // Only fire hook if project was actually changed.
        if (!Objects.equal(baseRev, commitRev)) {
          IdentifiedUser user = (IdentifiedUser) currentUser.get();
          hooks.doRefUpdatedHook(
              new Branch.NameKey(projectName, RefNames.REFS_CONFIG),
              baseRev, commitRev, user.getAccount());
        }
        projectCache.evict(projectName);
      } catch (IOException e) {
        // Preserve the underlying cause so the failure is diagnosable
        // (previously the cause was dropped on both paths).
        if (e.getCause() instanceof ConfigInvalidException) {
          throw new ResourceConflictException("Cannot update " + projectName
              + ": " + e.getCause().getMessage(), e);
        } else {
          throw new ResourceConflictException("Cannot update " + projectName,
              e);
        }
      }
    } catch (ConfigInvalidException err) {
      throw new ResourceConflictException("Cannot read " + pluginName
          + " configurations for project " + projectName, err);
    } catch (IOException err) {
      throw new ResourceConflictException("Cannot update " + pluginName
          + " configurations for project " + projectName, err);
    } finally {
      md.close();
    }
    return cfg.getReviewerFilterSections();
  }
}
PutReviewers: Simplify exception handling Change-Id: I237cb46eab75eca3998f3a232260939ce1e4ae25
src/main/java/com/googlesource/gerrit/plugins/reviewers/PutReviewers.java
PutReviewers: Simplify exception handling
Java
apache-2.0
5cd73ce7f1a3b8bafc36454f01b22f3692010ec0
0
IHTSDO/rf2-to-rf1-conversion,IHTSDO/rf2-to-rf1-conversion
package org.ihtsdo.snomed.rf2torf1conversion; import static org.ihtsdo.snomed.rf2torf1conversion.GlobalUtils.*; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Scanner; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.lang.reflect.Type; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FileUtils; import org.ihtsdo.snomed.rf2torf1conversion.pojo.Concept; import org.ihtsdo.snomed.rf2torf1conversion.pojo.ConceptDeserializer; import org.ihtsdo.snomed.rf2torf1conversion.pojo.LateralityIndicator; import org.ihtsdo.snomed.rf2torf1conversion.pojo.QualifyingRelationshipAttribute; import org.ihtsdo.snomed.rf2torf1conversion.pojo.QualifyingRelationshipRule; import org.ihtsdo.snomed.rf2torf1conversion.pojo.RF1SchemaConstants; import org.ihtsdo.snomed.rf2torf1conversion.pojo.RF2SchemaConstants; import com.google.common.base.Stopwatch; import com.google.common.io.Files; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.reflect.TypeToken; public class ConversionManager implements RF2SchemaConstants, RF1SchemaConstants { File intRf2Archive; File extRf2Archive; File unzipLocation = null; File additionalFilesLocation = null; File previousRF1Location; boolean useRelationshipIds = false; DBManager db; String intReleaseDate; String extReleaseDate; boolean includeHistory = true; boolean includeAllQualifyingRelationships = false; boolean includeLateralityIndicators = false; boolean isBeta = false; boolean onlyHistory 
= false; boolean isExtension = false; boolean goInteractive = false; Edition edition; private String EXT = "EXT"; private String LNG = "LNG"; private String DATE = "DATE"; private String OUT = "OUT"; private String outputFolderTemplate = "SnomedCT_OUT_INT_DATE"; private String ANCIENT_HISTORY = "/sct1_ComponentHistory_Core_INT_20130731.txt"; private String QUALIFYING_RULES = "/qualifying_relationship_rules.json"; private String AVAILABLE_SUBSET_IDS = "/available_sctids_partition_03.txt"; private String AVAILABLE_RELATIONSHIP_IDS = "/available_sctids_partition_02.txt"; private String RELATIONSHIP_FILENAME = "SnomedCT_OUT_INT_DATE/Terminology/Content/sct1_Relationships_Core_INT_DATE.txt"; private String BETA_PREFIX = "x"; Set<File> filesLoaded = new HashSet<File>(); private Long[] subsetIds; private Long maxPreviousSubsetId = null; private int previousSubsetVersion = 29; //Taken from 20160131 RF1 International Release enum Edition { INTERNATIONAL, SPANISH }; class Dialect { String langRefSetId; String langCode; Dialect (String langRefSetId, String langCode) { this.langRefSetId = langRefSetId; this.langCode = langCode; } } public final Dialect dialectEs = new Dialect ("450828004","es"); //Latin American Spanish public final Dialect dialectGb = new Dialect ("900000000000508004","en-GB"); public final Dialect dialectUs = new Dialect ("900000000000509007","en-US"); class EditionConfig { String editionName; String langCode; String outputName; Dialect[] dialects; EditionConfig (String editionName, String language, String outputName, Dialect[] dialects) { this.editionName = editionName; this.outputName = outputName; this.langCode = language; this.dialects = dialects; } } private static final String EDITION_DETERMINER = "sct2_Description_EXTFull-LNG_INT_DATE.txt"; static Map<Edition, EditionConfig> knownEditionMap = new HashMap<Edition, EditionConfig>(); { knownEditionMap.put(Edition.INTERNATIONAL, new EditionConfig("","en", "RF1Release", new Dialect[]{dialectGb, 
dialectUs})); //International Edition has no Extension name knownEditionMap.put(Edition.SPANISH, new EditionConfig("SpanishExtension", "es", "SpanishRelease-es",new Dialect[]{dialectEs})); } static Map<String, String> intfileToTable = new HashMap<String, String>(); { intfileToTable.put("sct2_Concept_EXTFull_INT_DATE.txt", "rf2_concept_sv"); intfileToTable.put("sct2_Relationship_EXTFull_INT_DATE.txt", "rf2_rel_sv"); intfileToTable.put("sct2_StatedRelationship_EXTFull_INT_DATE.txt", "rf2_rel_sv"); intfileToTable.put("sct2_Identifier_EXTFull_INT_DATE.txt", "rf2_identifier_sv"); //Extensions can use a mix of International and their own descriptions intfileToTable.put(EDITION_DETERMINER, "rf2_term_sv"); //We need to know the International Preferred Term if the Extension doesn't specify one intfileToTable.put("der2_cRefset_LanguageEXTFull-LNG_INT_DATE.txt", "rf2_crefset_sv"); //Concepts still need inactivation reasons from the International Edition intfileToTable.put("der2_cRefset_AssociationReferenceEXTFull_INT_DATE.txt", "rf2_crefset_sv"); intfileToTable.put("der2_cRefset_AttributeValueEXTFull_INT_DATE.txt", "rf2_crefset_sv"); //CTV3 and SNOMED RT Identifiers come from the International Edition intfileToTable.put("der2_sRefset_SimpleMapEXTFull_INT_DATE.txt", "rf2_srefset_sv"); //intfileToTable.put("der2_iissscRefset_ComplexEXTMapFull_INT_DATE.txt", "rf2_iissscrefset_sv"); //intfileToTable.put("der2_iisssccRefset_ExtendedMapEXTFull_INT_DATE.txt", "rf2_iisssccrefset_sv"); } static Map<String, String> extfileToTable = new HashMap<String, String>(); { //Extension could supplement any file in international edition extfileToTable.putAll(intfileToTable); extfileToTable.put(EDITION_DETERMINER, "rf2_term_sv"); extfileToTable.put("sct2_TextDefinition_EXTFull-LNG_INT_DATE.txt", "rf2_def_sv"); extfileToTable.put("der2_cRefset_AssociationReferenceEXTFull_INT_DATE.txt", "rf2_crefset_sv"); extfileToTable.put("der2_cRefset_AttributeValueEXTFull_INT_DATE.txt", "rf2_crefset_sv"); 
extfileToTable.put("der2_Refset_SimpleEXTFull_INT_DATE.txt", "rf2_refset_sv"); extfileToTable.put("der2_cRefset_LanguageEXTFull-LNG_INT_DATE.txt", "rf2_crefset_sv"); extfileToTable.put("der2_sRefset_SimpleMapEXTFull_INT_DATE.txt", "rf2_srefset_sv"); //extfileToTable.put("der2_iissscRefset_ComplexEXTMapFull_INT_DATE.txt", "rf2_iissscrefset_sv"); //extfileToTable.put("der2_iisssccRefset_ExtendedMapEXTFull_INT_DATE.txt", "rf2_iisssccrefset_sv"); extfileToTable.put("der2_cciRefset_RefsetDescriptorEXTFull_INT_DATE.txt", "rf2_ccirefset_sv"); extfileToTable.put("der2_ciRefset_DescriptionTypeEXTFull_INT_DATE.txt", "rf2_cirefset_sv"); extfileToTable.put("der2_ssRefset_ModuleDependencyEXTFull_INT_DATE.txt", "rf2_ssrefset_sv"); } public static Map<String, String>intExportMap = new HashMap<String, String>(); { // The slashes will be replaced with the OS appropriate separator at export time intExportMap.put(outputFolderTemplate + "/Terminology/Content/sct1_Concepts_Core_INT_DATE.txt", "select CONCEPTID, CONCEPTSTATUS, FULLYSPECIFIEDNAME, CTV3ID, SNOMEDID, ISPRIMITIVE from rf21_concept"); intExportMap .put(RELATIONSHIP_FILENAME, "select RELATIONSHIPID,CONCEPTID1,RELATIONSHIPTYPE,CONCEPTID2,CHARACTERISTICTYPE,REFINABILITY,RELATIONSHIPGROUP from rf21_rel"); } public static Map<String, String> extExportMap = new HashMap<String, String>(); { // The slashes will be replaced with the OS appropriate separator at export time extExportMap .put(outputFolderTemplate + "/Terminology/Content/sct1_Descriptions_LNG_INT_DATE.txt", "select DESCRIPTIONID, DESCRIPTIONSTATUS, CONCEPTID, TERM, INITIALCAPITALSTATUS, DESC_TYPE as DESCRIPTIONTYPE, LANGUAGECODE from rf21_term"); extExportMap.put(outputFolderTemplate + "/Terminology/History/sct1_References_Core_INT_DATE.txt", "select COMPONENTID, REFERENCETYPE, REFERENCEDID from rf21_REFERENCE"); extExportMap .put(outputFolderTemplate + "/Resources/StatedRelationships/res1_StatedRelationships_Core_INT_DATE.txt", "select 
RELATIONSHIPID,CONCEPTID1,RELATIONSHIPTYPE,CONCEPTID2,CHARACTERISTICTYPE,REFINABILITY,RELATIONSHIPGROUP from rf21_stated_rel"); } public static void main(String[] args) throws RF1ConversionException { //Set Windows Line separator as that's an RF1 standard System.setProperty("line.separator", "\r\n"); ConversionManager cm = new ConversionManager(); cm.askForLateralityFile(); cm.doRf2toRf1Conversion(args); } private void doRf2toRf1Conversion(String[] args) throws RF1ConversionException { File tempDBLocation = Files.createTempDir(); init(args, tempDBLocation); createDatabaseSchema(); File intLoadingArea = null; File extloadingArea = null; File exportArea = null; Stopwatch stopwatch = Stopwatch.createStarted(); String completionStatus = "failed"; try { print("\nExtracting RF2 International Edition Data..."); intLoadingArea = unzipArchive(intRf2Archive); intReleaseDate = findDateInString(intLoadingArea.listFiles()[0].getName(), false); determineEdition(intLoadingArea, Edition.INTERNATIONAL, intReleaseDate); if (extRf2Archive != null) { print("\nExtracting RF2 Extension Data..."); extloadingArea = unzipArchive(extRf2Archive); extReleaseDate = findDateInString(extloadingArea.listFiles()[0].getName(), false); determineEdition(extloadingArea, null, extReleaseDate); isExtension = true; } String releaseDate = isExtension ? 
extReleaseDate : intReleaseDate; int releaseIndex = calculateReleaseIndex(releaseDate); EditionConfig config = knownEditionMap.get(edition); int newSubsetVersion = 0; if (previousRF1Location != null) { useRelationshipIds = true; //This will allow us to set up SubsetIds (using available_sctids_partition_03) //And a map of existing relationship Ids to use for reconciliation loadPreviousRF1(config); //Initialise a set of available SCTIDS InputStream availableRelIds = ConversionManager.class.getResourceAsStream(AVAILABLE_RELATIONSHIP_IDS); RF1Constants.intialiseAvailableRelationships(availableRelIds); newSubsetVersion = previousSubsetVersion + 1; } else { useDeterministicSubsetIds(releaseIndex, config); newSubsetVersion = previousSubsetVersion + releaseIndex; } db.runStatement("SET @useRelationshipIds = " + useRelationshipIds); setSubsetIds(newSubsetVersion); long maxOperations = getMaxOperations(); if (onlyHistory) { maxOperations = 250; } else if (isExtension) { maxOperations = includeHistory? maxOperations : 388; } else { maxOperations = includeHistory? maxOperations : 391; } setMaxOperations(maxOperations); completeOutputMap(config); db.runStatement("SET @langCode = '" + config.langCode + "'"); db.runStatement("SET @langRefSet = '" + config.dialects[0].langRefSetId + "'"); print("\nLoading " + Edition.INTERNATIONAL +" common RF2 Data..."); loadRF2Data(intLoadingArea, Edition.INTERNATIONAL, intReleaseDate, intfileToTable); //Load the rest of the files from the same loading area if International Release, otherwise use the extensionLoading Area File loadingArea = isExtension ? 
extloadingArea : intLoadingArea; print("\nLoading " + edition +" RF2 Data..."); loadRF2Data(loadingArea, edition, releaseDate, extfileToTable); debug("\nCreating RF2 indexes..."); db.executeResource("create_rf2_indexes.sql"); if (!onlyHistory) { print("\nCalculating RF2 snapshot..."); calculateRF2Snapshot(releaseDate); } print("\nConverting RF2 to RF1..."); convert(); print("\nExporting RF1 to file..."); exportArea = Files.createTempDir(); exportRF1Data(intExportMap, releaseDate, intReleaseDate, knownEditionMap.get(edition), exportArea); exportRF1Data(extExportMap, releaseDate, releaseDate, knownEditionMap.get(edition), exportArea); String filePath = getQualifyingRelationshipFilepath(releaseDate, knownEditionMap.get(edition), exportArea); if (includeAllQualifyingRelationships || includeLateralityIndicators) { print("\nLoading Inferred Relationship Hierarchy for Qualifying Relationship computation..."); loadRelationshipHierarchy(intLoadingArea); } if (includeAllQualifyingRelationships) { print ("\nGenerating qualifying relationships"); Set<QualifyingRelationshipAttribute> ruleAttributes = loadQualifyingRelationshipRules(); generateQualifyingRelationships(ruleAttributes, filePath); } if (includeLateralityIndicators) { print ("\nGenerating laterality qualifying relationships"); generateLateralityRelationships(filePath); } if (additionalFilesLocation != null) { includeAdditionalFiles(exportArea, releaseDate, knownEditionMap.get(edition)); } print("\nZipping archive"); createArchive(exportArea); completionStatus = "completed"; if (goInteractive) { doInteractive(); } } finally { print("\nProcess " + completionStatus + " in " + stopwatch + " after completing " + getProgress() + "/" + getMaxOperations() + " operations."); print("Cleaning up resources..."); try { db.shutDown(true); // Also deletes all files if (tempDBLocation != null && tempDBLocation.exists()) { tempDBLocation.delete(); } } catch (Exception e) { debug("Error while cleaning up database " + 
tempDBLocation.getPath() + e.getMessage()); } try { if (intLoadingArea != null && intLoadingArea.exists()) { FileUtils.deleteDirectory(intLoadingArea); } if (extloadingArea != null && extloadingArea.exists()) { FileUtils.deleteDirectory(extloadingArea); } if (exportArea != null && exportArea.exists()) { FileUtils.deleteDirectory(exportArea); } } catch (Exception e) { debug("Error while cleaning up loading/export Areas " + e.getMessage()); } } } private void doInteractive() { boolean quitDetected = false; StringBuilder buff = new StringBuilder(); try (Scanner in = new Scanner(System.in)) { print ("Enter sql command to run, terminate with semicolon or type quit; to finish"); while (!quitDetected) { buff.append(in.nextLine().trim()); if (buff.length() > 1 && buff.charAt(buff.length()-1) == ';') { String command = buff.toString(); if (command.equalsIgnoreCase("quit;")) { quitDetected = true; } else { try{ db.runStatement(command.toString()); } catch (Exception e) { e.printStackTrace(); } buff.setLength(0); } } else { buff.append(" "); } } } } private void completeOutputMap(EditionConfig editionConfig) { if (isExtension) { String archiveName = "SnomedCT_OUT_INT_DATE"; String folderName = "Language-" + editionConfig.langCode; String fileRoot = archiveName + File.separator + "Subsets" + File.separator + folderName + File.separator; String fileName = "der1_SubsetMembers_"+ editionConfig.langCode + "_INT_DATE.txt"; extExportMap.put(fileRoot + fileName, "select s.SubsetId, s.MemberID, s.MemberStatus, s.LinkedID from rf21_SUBSETS s, rf21_SUBSETLIST sl where s.SubsetOriginalId = sl.subsetOriginalId AND sl.languageCode = ''" + editionConfig.langCode + "'';"); fileName = "der1_Subsets_" + editionConfig.langCode + "_INT_DATE.txt"; extExportMap.put(fileRoot + fileName, "select sl.* from rf21_SUBSETLIST sl where languagecode = ''" + editionConfig.langCode + "'';"); extExportMap.put("SnomedCT_OUT_INT_DATE/Resources/TextDefinitions/sct1_TextDefinitions_LNG_INT_DATE.txt", "select * 
from rf21_DEF"); } else { extExportMap.put("SnomedCT_OUT_INT_DATE/Resources/TextDefinitions/sct1_TextDefinitions_en-US_INT_DATE.txt", "select * from rf21_DEF"); extExportMap .put("SnomedCT_OUT_INT_DATE/Subsets/Language-en-GB/der1_SubsetMembers_en-GB_INT_DATE.txt", "select s.SubsetId, s.MemberID, s.MemberStatus, s.LinkedID from rf21_SUBSETS s, rf21_SUBSETLIST sl where s.SubsetOriginalId = sl.subsetOriginalId AND sl.languageCode in (''en'',''en-GB'')"); extExportMap.put("SnomedCT_OUT_INT_DATE/Subsets/Language-en-GB/der1_Subsets_en-GB_INT_DATE.txt", "select sl.* from rf21_SUBSETLIST sl where languagecode like ''%GB%''"); extExportMap .put("SnomedCT_OUT_INT_DATE/Subsets/Language-en-US/der1_SubsetMembers_en-US_INT_DATE.txt", "select s.SubsetId, s.MemberID, s.MemberStatus, s.LinkedID from rf21_SUBSETS s, rf21_SUBSETLIST sl where s.SubsetOriginalId = sl.subsetOriginalId AND sl.languageCode in (''en'',''en-US'')"); extExportMap.put("SnomedCT_RF1Release_INT_DATE/Subsets/Language-en-US/der1_Subsets_en-US_INT_DATE.txt", "select sl.* from rf21_SUBSETLIST sl where languagecode like ''%US%''"); } if (includeHistory) { extExportMap.put("SnomedCT_OUT_INT_DATE/Terminology/History/sct1_ComponentHistory_Core_INT_DATE.txt", "select COMPONENTID, RELEASEVERSION, CHANGETYPE, STATUS, REASON from rf21_COMPONENTHISTORY"); } } private void determineEdition(File loadingArea, Edition enforceEdition, String releaseDate) throws RF1ConversionException { //Loop through known editions and see if EDITION_DETERMINER file is present for (Map.Entry<Edition, EditionConfig> thisEdition : knownEditionMap.entrySet()) for (File thisFile : loadingArea.listFiles()) { EditionConfig parts = thisEdition.getValue(); String target = EDITION_DETERMINER.replace(EXT, parts.editionName) .replace(LNG, parts.langCode) .replace(DATE, releaseDate); if (thisFile.getName().equals(target)) { this.edition = thisEdition.getKey(); if (enforceEdition != null && this.edition != enforceEdition) { throw new 
RF1ConversionException("Needed " + enforceEdition + ", instead found " + this.edition); } return; } } throw new RF1ConversionException ("Failed to fine file matching any known edition: " + EDITION_DETERMINER + " in" + loadingArea.getAbsolutePath()); } private File unzipArchive(File archive) throws RF1ConversionException { File tempDir = null; try { if (unzipLocation != null) { tempDir = java.nio.file.Files.createTempDirectory(unzipLocation.toPath(), "rf2-to-rf1-").toFile(); } else { // Work in the traditional temp file location for the OS tempDir = Files.createTempDir(); } } catch (IOException e) { throw new RF1ConversionException("Unable to create temporary directory for archive extration"); } // We only need to work with the full files //...mostly, we also need the Snapshot Relationship file in order to work out the Qualifying Relationships unzipFlat(archive, tempDir, new String[]{"Full","sct2_Relationship_Snapshot"}); return tempDir; } private void createDatabaseSchema() throws RF1ConversionException { print("Creating database schema"); db.executeResource("create_rf2_schema.sql"); } private void calculateRF2Snapshot(String releaseDate) throws RF1ConversionException { String setDateSql = "SET @RDATE = " + releaseDate; db.runStatement(setDateSql); db.executeResource("create_rf2_snapshot.sql"); db.executeResource("populate_subset_2_refset.sql"); } private void convert() throws RF1ConversionException { db.executeResource("create_rf1_schema.sql"); if (includeHistory) { db.executeResource("populate_rf1_historical.sql"); } else { print("\nSkipping generation of RF1 History. 
Set -h parameter if this is required."); } if (!onlyHistory) { db.executeResource("populate_rf1.sql"); if (isExtension) { db.executeResource("populate_rf1_ext_descriptions.sql"); } else { db.executeResource("populate_rf1_int_descriptions.sql"); } db.executeResource("populate_rf1_associations.sql"); } } private void init(String[] args, File dbLocation) throws RF1ConversionException { if (args.length < 1) { print("Usage: java ConversionManager [-v] [-h] [-b] [-i] [-q] [-a <additional files location>] [-p <previous RF1 archive] [-u <unzip location>] <rf2 archive location> [<rf2 extension archive>]"); print(" b - beta indicator, causes an x to be prepended to output filenames"); print(" p - previous RF1 archive required for SubsetId and Relationship Id generation"); exit(); } boolean isUnzipLocation = false; boolean isAdditionalFilesLocation = false; boolean isPreviousRF1Location = false; for (String thisArg : args) { if (thisArg.equals("-v")) { GlobalUtils.verbose = true; } else if (thisArg.equals("-i")) { goInteractive = true; } else if (thisArg.equals("-H")) { includeHistory = true; onlyHistory = true; } else if (thisArg.equals("-b")) { isBeta = true; }else if (thisArg.equals("-u")) { isUnzipLocation = true; } else if (thisArg.equals("-a")) { isAdditionalFilesLocation = true; } else if (thisArg.equals("-p")) { isPreviousRF1Location = true; } else if (thisArg.equals("-q")) { includeAllQualifyingRelationships = true; } else if (isUnzipLocation) { unzipLocation = new File(thisArg); if (!unzipLocation.isDirectory()) { throw new RF1ConversionException(thisArg + " is an invalid location to unzip archive to!"); } isUnzipLocation = false; } else if (isAdditionalFilesLocation) { additionalFilesLocation = new File(thisArg); if (!additionalFilesLocation.isDirectory()) { throw new RF1ConversionException(thisArg + " is an invalid location to find additional files."); } isAdditionalFilesLocation = false; } else if (isPreviousRF1Location) { previousRF1Location = new File(thisArg); 
if (!previousRF1Location.exists() || !previousRF1Location.canRead()) { throw new RF1ConversionException(thisArg + " does not appear to be a valid RF1 archive."); } isPreviousRF1Location = false; } else if (intRf2Archive == null){ File possibleArchive = new File(thisArg); if (possibleArchive.exists() && !possibleArchive.isDirectory() && possibleArchive.canRead()) { intRf2Archive = possibleArchive; } } else { File possibleArchive = new File(thisArg); if (possibleArchive.exists() && !possibleArchive.isDirectory() && possibleArchive.canRead()) { extRf2Archive = possibleArchive; } } } if (intRf2Archive == null) { print("Unable to determine RF2 Archive: " + args[args.length - 1]); exit(); } db = new DBManager(); db.init(dbLocation); } private void loadRF2Data(File loadingArea, Edition edition, String releaseDate, Map<String, String> fileToTable) throws RF1ConversionException { // We can do the load in parallel. Only 3 threads because heavily I/O db.startParallelProcessing(3); for (Map.Entry<String, String> entry : fileToTable.entrySet()) { // Replace DATE in the filename with the actual release date String fileName = entry.getKey().replace(DATE, releaseDate) .replace(EXT, knownEditionMap.get(edition).editionName) .replace(LNG, knownEditionMap.get(edition).langCode); File file = new File(loadingArea + File.separator + fileName); //Only load each file once if (filesLoaded.contains(file)) { debug ("Skipping " + file.getName() + " already loaded as part of Internation Edition"); } else if (file.exists()) { db.load(file, entry.getValue()); filesLoaded.add(file); } else { print("\nWarning, skipping load of file " + file.getName() + " - not present"); } } db.finishParallelProcessing(); } private void exportRF1Data(Map<String, String> exportMap, String packageReleaseDate, String fileReleaseDate, EditionConfig editionConfig, File exportArea) throws RF1ConversionException { // We can do the export in parallel. 
Only 3 threads because heavily I/O db.startParallelProcessing(3); for (Map.Entry<String, String> entry : exportMap.entrySet()) { // Replace DATE in the filename with the actual release date String fileName = entry.getKey().replaceFirst(DATE, packageReleaseDate) .replace(DATE, fileReleaseDate) .replace(OUT, editionConfig.outputName) .replace(LNG, editionConfig.langCode); fileName = modifyFilenameIfBeta(fileName); String filePath = exportArea + File.separator + fileName; //If we're doing the history file, then we need to prepend the static //resource file InputStream isInclude = null; if (includeHistory && fileName.contains("ComponentHistory")) { isInclude = ConversionManager.class.getResourceAsStream(ANCIENT_HISTORY); if (isInclude == null) { throw new RF1ConversionException("Unable to obtain history file: " + ANCIENT_HISTORY); } } db.export(filePath, entry.getValue(), isInclude); } db.finishParallelProcessing(); } private String modifyFilenameIfBeta(String fileName) { if (isBeta) { //Beta prefix before the file shortname, but also for the leading directory int lastSlash = fileName.lastIndexOf(File.separator) + 1; fileName = BETA_PREFIX + fileName.substring(0,lastSlash) + BETA_PREFIX + fileName.substring(lastSlash); } return fileName; } private void loadRelationshipHierarchy(File intLoadingArea) throws RF1ConversionException { String fileName = intLoadingArea.getAbsolutePath() + File.separator + "sct2_Relationship_Snapshot_INT_DATE.txt"; fileName = fileName.replace(DATE, intReleaseDate); GraphLoader gl = new GraphLoader (fileName); gl.loadRelationships(); } private Set<QualifyingRelationshipAttribute> loadQualifyingRelationshipRules() { GsonBuilder gsonBuilder = new GsonBuilder(); gsonBuilder.registerTypeAdapter(Concept.class, new ConceptDeserializer()); Gson gson = gsonBuilder.create(); InputStream jsonStream = ConversionManager.class.getResourceAsStream(QUALIFYING_RULES); BufferedReader jsonReader = new BufferedReader(new InputStreamReader(jsonStream)); Type 
listType = new TypeToken<Set<QualifyingRelationshipAttribute>>() {}.getType(); Set<QualifyingRelationshipAttribute> attributes = gson.fromJson(jsonReader, listType); return attributes; } private void loadLateralityIndicators(File lateralityFile) throws RF1ConversionException { try (BufferedReader br = new BufferedReader(new FileReader(lateralityFile))) { String line; boolean firstLine = true; while ((line = br.readLine()) != null) { if (!firstLine) { LateralityIndicator.registerIndicator(line); } else { firstLine = false; } } } catch (IOException ioe) { throw new RF1ConversionException ("Unable to import laterality reference file " + lateralityFile.getAbsolutePath(), ioe); } } private void generateQualifyingRelationships( Set<QualifyingRelationshipAttribute> ruleAttributes, String filePath) throws RF1ConversionException { //For each attribute, work through each rule creating rules for self and all children of starting points, //except for exceptions try(FileWriter fw = new FileWriter(filePath, true); BufferedWriter bw = new BufferedWriter(fw); PrintWriter out = new PrintWriter(bw)) { for (QualifyingRelationshipAttribute thisAttribute : ruleAttributes) { StringBuffer commonRF1 = new StringBuffer().append(FIELD_DELIMITER) .append(thisAttribute.getType().getSctId()).append(FIELD_DELIMITER) .append(thisAttribute.getDestination().getSctId()).append(FIELD_DELIMITER) .append("1\t")//Qualifying Rel type .append(thisAttribute.getRefinability()).append("\t0"); //Refineable, Group 0 for (QualifyingRelationshipRule thisRule : thisAttribute.getRules()) { Set<Concept> potentialApplications = thisRule.getStartPoint().getAllDescendents(Concept.DEPTH_NOT_SET); Collection<Concept> ruleAppliedTo = CollectionUtils.subtract(potentialApplications, thisRule.getExceptions()); for (Concept thisException : thisRule.getExceptions()) { Set<Concept> exceptionDescendents = thisException.getAllDescendents(Concept.DEPTH_NOT_SET); ruleAppliedTo = CollectionUtils.subtract(ruleAppliedTo, 
exceptionDescendents); } //Now the remaining concepts that the rules applies to can be written out to file for (Concept thisConcept : ruleAppliedTo) { //Concept may already have this attribute as a defining relationship, skip if so. if (!thisConcept.hasAttribute(thisAttribute)) { String rf1Line = FIELD_DELIMITER + thisConcept.getSctId() + commonRF1; out.println(rf1Line); } } } } } catch (IOException e) { throw new RF1ConversionException ("Failure while outputting Qualifying Relationships: " + e.toString()); } } private void generateLateralityRelationships(String filePath) throws RF1ConversionException { //Check every concept to see if has a laterality indicator, and doesn't already have that //attribute as a defining relationship Set<Concept> allConcepts = Concept.getConcept(SNOMED_ROOT_CONCEPT).getAllDescendents(Concept.DEPTH_NOT_SET); StringBuffer commonRF1 = new StringBuffer().append(FIELD_DELIMITER) .append(LATERALITY_ATTRIB).append(FIELD_DELIMITER) .append(SIDE_VALUE).append(FIELD_DELIMITER) .append("1\t")//Qualifying Rel type .append(RF1Constants.MUST_REFINE).append("\t0"); //Refineable, Group 0 Concept lat = Concept.getConcept(Long.parseLong(LATERALITY_ATTRIB)); Concept side = Concept.getConcept(Long.parseLong(SIDE_VALUE)); QualifyingRelationshipAttribute LateralityAttribute = new QualifyingRelationshipAttribute (lat, side, RF1Constants.MUST_REFINE); try(FileWriter fw = new FileWriter(filePath, true); BufferedWriter bw = new BufferedWriter(fw); PrintWriter out = new PrintWriter(bw)) { for (Concept thisConcept : allConcepts) { if (LateralityIndicator.hasLateralityIndicator(thisConcept.getSctId(), LateralityIndicator.Lattomidsag.YES)) { if (!thisConcept.hasAttribute(LateralityAttribute)) { String relId = ""; //Default is to blank relationship ids if (useRelationshipIds) { relId = RF1Constants.lookupRelationshipId(thisConcept.getSctId().toString(), LATERALITY_ATTRIB, SIDE_VALUE, UNGROUPED); } String rf1Line = relId + FIELD_DELIMITER + thisConcept.getSctId() + 
commonRF1; out.println(rf1Line); } } } }catch (IOException e){ throw new RF1ConversionException ("Failure while output Laterality Relationships: " + e.toString()); } } private String getQualifyingRelationshipFilepath(String releaseDate, EditionConfig editionConfig, File exportArea) throws RF1ConversionException { // Replace DATE in the filename with the actual release date String fileName = RELATIONSHIP_FILENAME.replaceFirst(DATE, releaseDate) .replace(DATE, releaseDate) .replace(OUT, editionConfig.outputName) .replace(LNG, editionConfig.langCode); fileName = modifyFilenameIfBeta(fileName); String filePath = exportArea + File.separator + fileName; File outputFile = new File(filePath); try{ if (!outputFile.exists()) { outputFile.getParentFile().mkdirs(); outputFile.createNewFile(); } } catch (IOException e) { throw new RF1ConversionException("Unable to create file for Qualifying Relationships: " + e); } return filePath; } private void askForLateralityFile() { try (Scanner in = new Scanner(System.in)) { print ("Do you wish to create Lateralized Qualifying Relationships? 
[Y/N]: "); String response = in.nextLine().trim(); if (response.toUpperCase().equals("Y")) { print ("Please provide matching laterality reference file location: "); String latFileLPath = in.nextLine().trim(); File lateralityFile = new File(latFileLPath); if (!lateralityFile.exists()) { print ("File not found: " + latFileLPath); askForLateralityFile(); } else { try{ loadLateralityIndicators(lateralityFile); includeLateralityIndicators = true; } catch (Exception e) { print ("Failed to load Laterality text file due to " + e.getMessage()); askForLateralityFile(); } } } } } private void includeAdditionalFiles(File outputDirectory, String releaseDate, EditionConfig editionConfig){ Map<String, String> targetLocation = new HashMap<String, String>(); targetLocation.put(".pdf", "Documentation/"); targetLocation.put("KeyIndex_", "Resources/Indexes/"); targetLocation.put("Canonical", "Resources/Canonical Table/"); String rootPath = outputDirectory.getAbsolutePath() + File.separator + (isBeta?BETA_PREFIX:"") + outputFolderTemplate + File.separator; rootPath = rootPath.replace(OUT, editionConfig.outputName) .replace(DATE, releaseDate); File[] directoryListing = additionalFilesLocation.listFiles(); if (directoryListing != null) { for (File child : directoryListing) { String childFilename = child.getName(); //Do we know to put this file in a particular location? 
//otherwise path will remain the root path for (String match : targetLocation.keySet()) { if (childFilename.contains(match)) { childFilename = targetLocation.get(match) + childFilename; break; } } //Ensure path exists for where file is being copied to File copiedFile = new File (rootPath + childFilename); copiedFile.getParentFile().mkdirs(); try { FileUtils.copyFile(child, copiedFile); print ("Copied additional file to " + copiedFile.getAbsolutePath()); } catch (IOException e) { print ("Unable to copy additional file " + childFilename + " due to " + e.getMessage()); } } } } private void loadPreviousRF1(EditionConfig config) throws RF1ConversionException { try { ZipInputStream zis = new ZipInputStream(new FileInputStream(previousRF1Location)); ZipEntry ze = zis.getNextEntry(); try { while (ze != null) { if (!ze.isDirectory()) { Path p = Paths.get(ze.getName()); String fileName = p.getFileName().toString(); if (fileName.contains("der1_Subsets")) { updateSubsetIds(zis, config); } else if (fileName.contains("sct1_Relationships")) { //We need to use static methods here so that H2 can access as functions. print ("\nLoading previous RF1 relationships"); RF1Constants.loadPreviousRelationships(zis); } } ze = zis.getNextEntry(); } } finally { zis.closeEntry(); zis.close(); } } catch (IOException e) { throw new RF1ConversionException("Failed to load previous RF1 archive " + previousRF1Location.getName(), e); } } private void updateSubsetIds(ZipInputStream zis, EditionConfig config) throws NumberFormatException, IOException { //This function will also pick up and set the previous subset version Long subsetId = loadSubsetsFile(zis); //Do we need to recover a new set of subsetIds? 
if (maxPreviousSubsetId == null || subsetId > maxPreviousSubsetId) { maxPreviousSubsetId = subsetId; InputStream is = ConversionManager.class.getResourceAsStream(AVAILABLE_SUBSET_IDS); try (BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))){ String line; int subsetIdsSet = 0; subsetIds = new Long[config.dialects.length]; while ((line = br.readLine()) != null && subsetIdsSet < config.dialects.length) { Long thisAvailableSubsetId = Long.parseLong(line.trim()); if (thisAvailableSubsetId.compareTo(maxPreviousSubsetId) > 0) { debug ("Obtaining new Subset Ids from resource file"); subsetIds[subsetIdsSet] = thisAvailableSubsetId; subsetIdsSet++; } } } } } /* * @return the greatest subsetId in the file */ private Long loadSubsetsFile(ZipInputStream zis) throws IOException { Long maxSubsetIdInFile = null; BufferedReader br = new BufferedReader(new InputStreamReader(zis, StandardCharsets.UTF_8)); String line; boolean isFirstLine = true; while ((line = br.readLine()) != null) { if (isFirstLine) { isFirstLine = false; continue; } String[] lineItems = line.split(FIELD_DELIMITER); //SubsetId is the first column Long thisSubsetId = Long.parseLong(lineItems[RF1_IDX_SUBSETID]); if (maxSubsetIdInFile == null || thisSubsetId > maxSubsetIdInFile) { maxSubsetIdInFile = thisSubsetId; } //SubsetVersion is the 3rd int thisSubsetVersion = Integer.parseInt(lineItems[RF1_IDX_SUBSETVERSION]); if (thisSubsetVersion > previousSubsetVersion) { previousSubsetVersion = thisSubsetVersion; } } return maxSubsetIdInFile; } private void useDeterministicSubsetIds(int releaseIndex, EditionConfig config) throws RF1ConversionException { try { InputStream is = ConversionManager.class.getResourceAsStream(AVAILABLE_SUBSET_IDS); try (BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))){ String line; int subsetIdsSet = 0; subsetIds = new Long[config.dialects.length]; int filePos = 0; while ((line = br.readLine()) != null && 
subsetIdsSet < config.dialects.length) { filePos++; Long thisAvailableSubsetId = Long.parseLong(line.trim()); if (filePos >= releaseIndex) { debug ("Obtaining new Subset Ids from resource file"); subsetIds[subsetIdsSet] = thisAvailableSubsetId; subsetIdsSet++; } } } } catch (IOException e) { throw new RF1ConversionException("Unable to determine new subset Ids",e); } } private void setSubsetIds(int newSubsetVersion) { for (int i=0 ; i<subsetIds.length; i++) { db.runStatement("SET @SUBSETID_" + (i+1) + " = " + subsetIds[i]); } db.runStatement("SET @SUBSET_VERSION = " + newSubsetVersion); } int calculateReleaseIndex(String releaseDate) { //returns a number that can be used when a previous release is not available //to give an incrementing variable that we can use to move through the SCTID 02 & 03 files int year = Integer.parseInt(releaseDate.substring(0, 4)); int month = Integer.parseInt(releaseDate.substring(4,6)); int index = ((year - 2016)*10) + month; return index; } }
src/main/java/org/ihtsdo/snomed/rf2torf1conversion/ConversionManager.java
package org.ihtsdo.snomed.rf2torf1conversion; import static org.ihtsdo.snomed.rf2torf1conversion.GlobalUtils.*; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Scanner; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.lang.reflect.Type; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FileUtils; import org.ihtsdo.snomed.rf2torf1conversion.pojo.Concept; import org.ihtsdo.snomed.rf2torf1conversion.pojo.ConceptDeserializer; import org.ihtsdo.snomed.rf2torf1conversion.pojo.LateralityIndicator; import org.ihtsdo.snomed.rf2torf1conversion.pojo.QualifyingRelationshipAttribute; import org.ihtsdo.snomed.rf2torf1conversion.pojo.QualifyingRelationshipRule; import org.ihtsdo.snomed.rf2torf1conversion.pojo.RF1SchemaConstants; import org.ihtsdo.snomed.rf2torf1conversion.pojo.RF2SchemaConstants; import com.google.common.base.Stopwatch; import com.google.common.io.Files; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.reflect.TypeToken; public class ConversionManager implements RF2SchemaConstants, RF1SchemaConstants { File intRf2Archive; File extRf2Archive; File unzipLocation = null; File additionalFilesLocation = null; File previousRF1Location; boolean useRelationshipIds = false; DBManager db; String intReleaseDate; String extReleaseDate; boolean includeHistory = true; boolean includeAllQualifyingRelationships = false; boolean includeLateralityIndicators = false; boolean isBeta = false; boolean onlyHistory 
= false; boolean isExtension = false; boolean goInteractive = false; Edition edition; private String EXT = "EXT"; private String LNG = "LNG"; private String DATE = "DATE"; private String OUT = "OUT"; private String outputFolderTemplate = "SnomedCT_OUT_INT_DATE"; private String ANCIENT_HISTORY = "/sct1_ComponentHistory_Core_INT_20130731.txt"; private String QUALIFYING_RULES = "/qualifying_relationship_rules.json"; private String AVAILABLE_SUBSET_IDS = "/available_sctids_partition_03.txt"; private String AVAILABLE_RELATIONSHIP_IDS = "/available_sctids_partition_02.txt"; private String RELATIONSHIP_FILENAME = "SnomedCT_OUT_INT_DATE/Terminology/Content/sct1_Relationships_Core_INT_DATE.txt"; private String BETA_PREFIX = "x"; Set<File> filesLoaded = new HashSet<File>(); private Long[] subsetIds; private Long maxPreviousSubsetId = null; private int previousSubsetVersion = 29; //Taken from 20160131 RF1 International Release enum Edition { INTERNATIONAL, SPANISH }; class Dialect { String langRefSetId; String langCode; Dialect (String langRefSetId, String langCode) { this.langRefSetId = langRefSetId; this.langCode = langCode; } } public final Dialect dialectEs = new Dialect ("450828004","es"); //Latin American Spanish public final Dialect dialectGb = new Dialect ("900000000000508004","en-GB"); public final Dialect dialectUs = new Dialect ("900000000000509007","en-US"); class EditionConfig { String editionName; String langCode; String outputName; Dialect[] dialects; EditionConfig (String editionName, String language, String outputName, Dialect[] dialects) { this.editionName = editionName; this.outputName = outputName; this.langCode = language; this.dialects = dialects; } } private static final String EDITION_DETERMINER = "sct2_Description_EXTFull-LNG_INT_DATE.txt"; static Map<Edition, EditionConfig> knownEditionMap = new HashMap<Edition, EditionConfig>(); { knownEditionMap.put(Edition.INTERNATIONAL, new EditionConfig("","en", "RF1Release", new Dialect[]{dialectGb, 
dialectUs})); //International Edition has no Extension name knownEditionMap.put(Edition.SPANISH, new EditionConfig("SpanishExtension", "es", "SpanishRelease-es",new Dialect[]{dialectEs})); } static Map<String, String> intfileToTable = new HashMap<String, String>(); { intfileToTable.put("sct2_Concept_EXTFull_INT_DATE.txt", "rf2_concept_sv"); intfileToTable.put("sct2_Relationship_EXTFull_INT_DATE.txt", "rf2_rel_sv"); intfileToTable.put("sct2_StatedRelationship_EXTFull_INT_DATE.txt", "rf2_rel_sv"); intfileToTable.put("sct2_Identifier_EXTFull_INT_DATE.txt", "rf2_identifier_sv"); //Extensions can use a mix of International and their own descriptions intfileToTable.put(EDITION_DETERMINER, "rf2_term_sv"); //We need to know the International Preferred Term if the Extension doesn't specify one intfileToTable.put("der2_cRefset_LanguageEXTFull-LNG_INT_DATE.txt", "rf2_crefset_sv"); //Concepts still need inactivation reasons from the International Edition intfileToTable.put("der2_cRefset_AssociationReferenceEXTFull_INT_DATE.txt", "rf2_crefset_sv"); intfileToTable.put("der2_cRefset_AttributeValueEXTFull_INT_DATE.txt", "rf2_crefset_sv"); //CTV3 and SNOMED RT Identifiers come from the International Edition intfileToTable.put("der2_sRefset_SimpleMapEXTFull_INT_DATE.txt", "rf2_srefset_sv"); //intfileToTable.put("der2_iissscRefset_ComplexEXTMapFull_INT_DATE.txt", "rf2_iissscrefset_sv"); //intfileToTable.put("der2_iisssccRefset_ExtendedMapEXTFull_INT_DATE.txt", "rf2_iisssccrefset_sv"); } static Map<String, String> extfileToTable = new HashMap<String, String>(); { //Extension could supplement any file in international edition extfileToTable.putAll(intfileToTable); extfileToTable.put(EDITION_DETERMINER, "rf2_term_sv"); extfileToTable.put("sct2_TextDefinition_EXTFull-LNG_INT_DATE.txt", "rf2_def_sv"); extfileToTable.put("der2_cRefset_AssociationReferenceEXTFull_INT_DATE.txt", "rf2_crefset_sv"); extfileToTable.put("der2_cRefset_AttributeValueEXTFull_INT_DATE.txt", "rf2_crefset_sv"); 
extfileToTable.put("der2_Refset_SimpleEXTFull_INT_DATE.txt", "rf2_refset_sv"); extfileToTable.put("der2_cRefset_LanguageEXTFull-LNG_INT_DATE.txt", "rf2_crefset_sv"); extfileToTable.put("der2_sRefset_SimpleMapEXTFull_INT_DATE.txt", "rf2_srefset_sv"); //extfileToTable.put("der2_iissscRefset_ComplexEXTMapFull_INT_DATE.txt", "rf2_iissscrefset_sv"); //extfileToTable.put("der2_iisssccRefset_ExtendedMapEXTFull_INT_DATE.txt", "rf2_iisssccrefset_sv"); extfileToTable.put("der2_cciRefset_RefsetDescriptorEXTFull_INT_DATE.txt", "rf2_ccirefset_sv"); extfileToTable.put("der2_ciRefset_DescriptionTypeEXTFull_INT_DATE.txt", "rf2_cirefset_sv"); extfileToTable.put("der2_ssRefset_ModuleDependencyEXTFull_INT_DATE.txt", "rf2_ssrefset_sv"); } public static Map<String, String>intExportMap = new HashMap<String, String>(); { // The slashes will be replaced with the OS appropriate separator at export time intExportMap.put(outputFolderTemplate + "/Terminology/Content/sct1_Concepts_Core_INT_DATE.txt", "select CONCEPTID, CONCEPTSTATUS, FULLYSPECIFIEDNAME, CTV3ID, SNOMEDID, ISPRIMITIVE from rf21_concept"); intExportMap .put(RELATIONSHIP_FILENAME, "select RELATIONSHIPID,CONCEPTID1,RELATIONSHIPTYPE,CONCEPTID2,CHARACTERISTICTYPE,REFINABILITY,RELATIONSHIPGROUP from rf21_rel"); } public static Map<String, String> extExportMap = new HashMap<String, String>(); { // The slashes will be replaced with the OS appropriate separator at export time extExportMap .put(outputFolderTemplate + "/Terminology/Content/sct1_Descriptions_LNG_INT_DATE.txt", "select DESCRIPTIONID, DESCRIPTIONSTATUS, CONCEPTID, TERM, INITIALCAPITALSTATUS, DESC_TYPE as DESCRIPTIONTYPE, LANGUAGECODE from rf21_term"); extExportMap.put(outputFolderTemplate + "/Terminology/History/sct1_References_Core_INT_DATE.txt", "select COMPONENTID, REFERENCETYPE, REFERENCEDID from rf21_REFERENCE"); extExportMap .put(outputFolderTemplate + "/Resources/StatedRelationships/res1_StatedRelationships_Core_INT_DATE.txt", "select 
RELATIONSHIPID,CONCEPTID1,RELATIONSHIPTYPE,CONCEPTID2,CHARACTERISTICTYPE,REFINABILITY,RELATIONSHIPGROUP from rf21_stated_rel"); } public static void main(String[] args) throws RF1ConversionException { //Set Windows Line separator as that's an RF1 standard System.setProperty("line.separator", "\r\n"); ConversionManager cm = new ConversionManager(); cm.askForLateralityFile(); cm.doRf2toRf1Conversion(args); } private void doRf2toRf1Conversion(String[] args) throws RF1ConversionException { File tempDBLocation = Files.createTempDir(); init(args, tempDBLocation); createDatabaseSchema(); File intLoadingArea = null; File extloadingArea = null; File exportArea = null; Stopwatch stopwatch = Stopwatch.createStarted(); String completionStatus = "failed"; try { print("\nExtracting RF2 International Edition Data..."); intLoadingArea = unzipArchive(intRf2Archive); intReleaseDate = findDateInString(intLoadingArea.listFiles()[0].getName(), false); determineEdition(intLoadingArea, Edition.INTERNATIONAL, intReleaseDate); if (extRf2Archive != null) { print("\nExtracting RF2 Extension Data..."); extloadingArea = unzipArchive(extRf2Archive); extReleaseDate = findDateInString(extloadingArea.listFiles()[0].getName(), false); determineEdition(extloadingArea, null, extReleaseDate); isExtension = true; } String releaseDate = isExtension ? 
extReleaseDate : intReleaseDate; int releaseIndex = calculateReleaseIndex(releaseDate); EditionConfig config = knownEditionMap.get(edition); int newSubsetVersion = 0; if (previousRF1Location != null) { useRelationshipIds = true; //This will allow us to set up SubsetIds (using available_sctids_partition_03) //And a map of existing relationship Ids to use for reconciliation loadPreviousRF1(config); //Initialise a set of available SCTIDS InputStream availableRelIds = ConversionManager.class.getResourceAsStream(AVAILABLE_RELATIONSHIP_IDS); RF1Constants.intialiseAvailableRelationships(availableRelIds); newSubsetVersion = previousSubsetVersion + 1; } else { useDeterministicSubsetIds(releaseIndex, config); newSubsetVersion = previousSubsetVersion + releaseIndex; } db.runStatement("SET @useRelationshipIds = " + useRelationshipIds); setSubsetIds(newSubsetVersion); long maxOperations = getMaxOperations(); if (onlyHistory) { maxOperations = 250; } else if (isExtension) { maxOperations = includeHistory? maxOperations : 388; } else { maxOperations = includeHistory? maxOperations : 391; } setMaxOperations(maxOperations); completeOutputMap(config); db.runStatement("SET @langCode = '" + config.langCode + "'"); db.runStatement("SET @langRefSet = '" + config.dialects[0].langRefSetId + "'"); print("\nLoading " + Edition.INTERNATIONAL +" common RF2 Data..."); loadRF2Data(intLoadingArea, Edition.INTERNATIONAL, intReleaseDate, intfileToTable); //Load the rest of the files from the same loading area if International Release, otherwise use the extensionLoading Area File loadingArea = isExtension ? 
extloadingArea : intLoadingArea; print("\nLoading " + edition +" RF2 Data..."); loadRF2Data(loadingArea, edition, releaseDate, extfileToTable); debug("\nCreating RF2 indexes..."); db.executeResource("create_rf2_indexes.sql"); if (!onlyHistory) { print("\nCalculating RF2 snapshot..."); calculateRF2Snapshot(releaseDate); } print("\nConverting RF2 to RF1..."); convert(); print("\nExporting RF1 to file..."); exportArea = Files.createTempDir(); exportRF1Data(intExportMap, releaseDate, intReleaseDate, knownEditionMap.get(edition), exportArea); exportRF1Data(extExportMap, releaseDate, releaseDate, knownEditionMap.get(edition), exportArea); String filePath = getQualifyingRelationshipFilepath(releaseDate, knownEditionMap.get(edition), exportArea); if (includeAllQualifyingRelationships || includeLateralityIndicators) { print("\nLoading Inferred Relationship Hierarchy for Qualifying Relationship computation..."); loadRelationshipHierarchy(intLoadingArea); } if (includeAllQualifyingRelationships) { print ("\nGenerating qualifying relationships"); Set<QualifyingRelationshipAttribute> ruleAttributes = loadQualifyingRelationshipRules(); generateQualifyingRelationships(ruleAttributes, filePath); } if (includeLateralityIndicators) { print ("\nGenerating laterality qualifying relationships"); generateLateralityRelationships(filePath); } if (additionalFilesLocation != null) { includeAdditionalFiles(exportArea, releaseDate, knownEditionMap.get(edition)); } print("\nZipping archive"); createArchive(exportArea); completionStatus = "completed"; if (goInteractive) { doInteractive(); } } finally { print("\nProcess " + completionStatus + " in " + stopwatch + " after completing " + getProgress() + "/" + getMaxOperations() + " operations."); print("Cleaning up resources..."); try { db.shutDown(true); // Also deletes all files if (tempDBLocation != null && tempDBLocation.exists()) { tempDBLocation.delete(); } } catch (Exception e) { debug("Error while cleaning up database " + 
tempDBLocation.getPath() + e.getMessage()); } try { if (intLoadingArea != null && intLoadingArea.exists()) { FileUtils.deleteDirectory(intLoadingArea); } if (extloadingArea != null && extloadingArea.exists()) { FileUtils.deleteDirectory(extloadingArea); } if (exportArea != null && exportArea.exists()) { FileUtils.deleteDirectory(exportArea); } } catch (Exception e) { debug("Error while cleaning up loading/export Areas " + e.getMessage()); } } } private void doInteractive() { boolean quitDetected = false; StringBuilder buff = new StringBuilder(); try (Scanner in = new Scanner(System.in)) { print ("Enter sql command to run, terminate with semicolon or type quit; to finish"); while (!quitDetected) { buff.append(in.nextLine().trim()); if (buff.length() > 1 && buff.charAt(buff.length()-1) == ';') { String command = buff.toString(); if (command.equalsIgnoreCase("quit;")) { quitDetected = true; } else { try{ db.runStatement(command.toString()); } catch (Exception e) { e.printStackTrace(); } buff.setLength(0); } } else { buff.append(" "); } } } } private void completeOutputMap(EditionConfig editionConfig) { if (isExtension) { String archiveName = "SnomedCT_OUT_INT_DATE"; String folderName = "Language-" + editionConfig.langCode; String fileRoot = archiveName + File.separator + "Subsets" + File.separator + folderName + File.separator; String fileName = "der1_SubsetMembers_"+ editionConfig.langCode + "_INT_DATE.txt"; extExportMap.put(fileRoot + fileName, "select s.SubsetId, s.MemberID, s.MemberStatus, s.LinkedID from rf21_SUBSETS s, rf21_SUBSETLIST sl where s.SubsetOriginalId = sl.subsetOriginalId AND sl.languageCode = ''" + editionConfig.langCode + "'';"); fileName = "der1_Subsets_" + editionConfig.langCode + "_INT_DATE.txt"; extExportMap.put(fileRoot + fileName, "select sl.* from rf21_SUBSETLIST sl where languagecode = ''" + editionConfig.langCode + "'';"); extExportMap.put("SnomedCT_OUT_INT_DATE/Resources/TextDefinitions/sct1_TextDefinitions_LNG_INT_DATE.txt", "select * 
from rf21_DEF"); } else { extExportMap.put("SnomedCT_OUT_INT_DATE/Resources/TextDefinitions/sct1_TextDefinitions_en-US_INT_DATE.txt", "select * from rf21_DEF"); extExportMap .put("SnomedCT_OUT_INT_DATE/Subsets/Language-en-GB/der1_SubsetMembers_en-GB_INT_DATE.txt", "select s.SubsetId, s.MemberID, s.MemberStatus, s.LinkedID from rf21_SUBSETS s, rf21_SUBSETLIST sl where s.SubsetOriginalId = sl.subsetOriginalId AND sl.languageCode in (''en'',''en-GB'')"); extExportMap.put("SnomedCT_OUT_INT_DATE/Subsets/Language-en-GB/der1_Subsets_en-GB_INT_DATE.txt", "select sl.* from rf21_SUBSETLIST sl where languagecode like ''%GB%''"); extExportMap .put("SnomedCT_OUT_INT_DATE/Subsets/Language-en-US/der1_SubsetMembers_en-US_INT_DATE.txt", "select s.SubsetId, s.MemberID, s.MemberStatus, s.LinkedID from rf21_SUBSETS s, rf21_SUBSETLIST sl where s.SubsetOriginalId = sl.subsetOriginalId AND sl.languageCode in (''en'',''en-US'')"); extExportMap.put("SnomedCT_RF1Release_INT_DATE/Subsets/Language-en-US/der1_Subsets_en-US_INT_DATE.txt", "select sl.* from rf21_SUBSETLIST sl where languagecode like ''%US%''"); } if (includeHistory) { extExportMap.put("SnomedCT_OUT_INT_DATE/Terminology/History/sct1_ComponentHistory_Core_INT_DATE.txt", "select COMPONENTID, RELEASEVERSION, CHANGETYPE, STATUS, REASON from rf21_COMPONENTHISTORY"); } } private void determineEdition(File loadingArea, Edition enforceEdition, String releaseDate) throws RF1ConversionException { //Loop through known editions and see if EDITION_DETERMINER file is present for (Map.Entry<Edition, EditionConfig> thisEdition : knownEditionMap.entrySet()) for (File thisFile : loadingArea.listFiles()) { EditionConfig parts = thisEdition.getValue(); String target = EDITION_DETERMINER.replace(EXT, parts.editionName) .replace(LNG, parts.langCode) .replace(DATE, releaseDate); if (thisFile.getName().equals(target)) { this.edition = thisEdition.getKey(); if (enforceEdition != null && this.edition != enforceEdition) { throw new 
RF1ConversionException("Needed " + enforceEdition + ", instead found " + this.edition); } return; } } throw new RF1ConversionException ("Failed to fine file matching any known edition: " + EDITION_DETERMINER + " in" + loadingArea.getAbsolutePath()); } private File unzipArchive(File archive) throws RF1ConversionException { File tempDir = null; try { if (unzipLocation != null) { tempDir = java.nio.file.Files.createTempDirectory(unzipLocation.toPath(), "rf2-to-rf1-").toFile(); } else { // Work in the traditional temp file location for the OS tempDir = Files.createTempDir(); } } catch (IOException e) { throw new RF1ConversionException("Unable to create temporary directory for archive extration"); } // We only need to work with the full files //...mostly, we also need the Snapshot Relationship file in order to work out the Qualifying Relationships unzipFlat(archive, tempDir, new String[]{"Full","sct2_Relationship_Snapshot"}); return tempDir; } private void createDatabaseSchema() throws RF1ConversionException { print("Creating database schema"); db.executeResource("create_rf2_schema.sql"); } private void calculateRF2Snapshot(String releaseDate) throws RF1ConversionException { String setDateSql = "SET @RDATE = " + releaseDate; db.runStatement(setDateSql); db.executeResource("create_rf2_snapshot.sql"); db.executeResource("populate_subset_2_refset.sql"); } private void convert() throws RF1ConversionException { db.executeResource("create_rf1_schema.sql"); if (includeHistory) { db.executeResource("populate_rf1_historical.sql"); } else { print("\nSkipping generation of RF1 History. 
Set -h parameter if this is required."); } if (!onlyHistory) { db.executeResource("populate_rf1.sql"); if (isExtension) { db.executeResource("populate_rf1_ext_descriptions.sql"); } else { db.executeResource("populate_rf1_int_descriptions.sql"); } db.executeResource("populate_rf1_associations.sql"); } } private void init(String[] args, File dbLocation) throws RF1ConversionException { if (args.length < 1) { print("Usage: java ConversionManager [-v] [-h] [-b] [-i] [-q] [-a <additional files location>] [-p <previous RF1 archive] [-u <unzip location>] <rf2 archive location> [<rf2 extension archive>]"); print(" b - beta indicator, causes an x to be prepended to output filenames"); print(" p - previous RF1 archive required for SubsetId and Relationship Id generation"); exit(); } boolean isUnzipLocation = false; boolean isAdditionalFilesLocation = false; boolean isPreviousRF1Location = false; for (String thisArg : args) { if (thisArg.equals("-v")) { GlobalUtils.verbose = true; } else if (thisArg.equals("-i")) { goInteractive = true; } else if (thisArg.equals("-H")) { includeHistory = true; onlyHistory = true; } else if (thisArg.equals("-b")) { isBeta = true; }else if (thisArg.equals("-u")) { isUnzipLocation = true; } else if (thisArg.equals("-a")) { isAdditionalFilesLocation = true; } else if (thisArg.equals("-p")) { isPreviousRF1Location = true; } else if (thisArg.equals("-q")) { includeAllQualifyingRelationships = true; } else if (isUnzipLocation) { unzipLocation = new File(thisArg); if (!unzipLocation.isDirectory()) { throw new RF1ConversionException(thisArg + " is an invalid location to unzip archive to!"); } isUnzipLocation = false; } else if (isAdditionalFilesLocation) { additionalFilesLocation = new File(thisArg); if (!additionalFilesLocation.isDirectory()) { throw new RF1ConversionException(thisArg + " is an invalid location to find additional files."); } isAdditionalFilesLocation = false; } else if (isPreviousRF1Location) { previousRF1Location = new File(thisArg); 
if (!previousRF1Location.exists() || !previousRF1Location.canRead()) { throw new RF1ConversionException(thisArg + " does not appear to be a valid RF1 archive."); } isPreviousRF1Location = false; } else if (intRf2Archive == null){ File possibleArchive = new File(thisArg); if (possibleArchive.exists() && !possibleArchive.isDirectory() && possibleArchive.canRead()) { intRf2Archive = possibleArchive; } } else { File possibleArchive = new File(thisArg); if (possibleArchive.exists() && !possibleArchive.isDirectory() && possibleArchive.canRead()) { extRf2Archive = possibleArchive; } } } if (intRf2Archive == null) { print("Unable to determine RF2 Archive: " + args[args.length - 1]); exit(); } db = new DBManager(); db.init(dbLocation); } private void loadRF2Data(File loadingArea, Edition edition, String releaseDate, Map<String, String> fileToTable) throws RF1ConversionException { // We can do the load in parallel. Only 3 threads because heavily I/O db.startParallelProcessing(3); for (Map.Entry<String, String> entry : fileToTable.entrySet()) { // Replace DATE in the filename with the actual release date String fileName = entry.getKey().replace(DATE, releaseDate) .replace(EXT, knownEditionMap.get(edition).editionName) .replace(LNG, knownEditionMap.get(edition).langCode); File file = new File(loadingArea + File.separator + fileName); //Only load each file once if (filesLoaded.contains(file)) { debug ("Skipping " + file.getName() + " already loaded as part of Internation Edition"); } else if (file.exists()) { db.load(file, entry.getValue()); filesLoaded.add(file); } else { print("\nWarning, skipping load of file " + file.getName() + " - not present"); } } db.finishParallelProcessing(); } private void exportRF1Data(Map<String, String> exportMap, String packageReleaseDate, String fileReleaseDate, EditionConfig editionConfig, File exportArea) throws RF1ConversionException { // We can do the export in parallel. 
Only 3 threads because heavily I/O db.startParallelProcessing(3); for (Map.Entry<String, String> entry : exportMap.entrySet()) { // Replace DATE in the filename with the actual release date String fileName = entry.getKey().replaceFirst(DATE, packageReleaseDate) .replace(DATE, fileReleaseDate) .replace(OUT, editionConfig.outputName) .replace(LNG, editionConfig.langCode); fileName = modifyFilenameIfBeta(fileName); String filePath = exportArea + File.separator + fileName; //If we're doing the history file, then we need to prepend the static //resource file InputStream isInclude = null; if (includeHistory && fileName.contains("ComponentHistory")) { isInclude = ConversionManager.class.getResourceAsStream(ANCIENT_HISTORY); if (isInclude == null) { throw new RF1ConversionException("Unable to obtain history file: " + ANCIENT_HISTORY); } } db.export(filePath, entry.getValue(), isInclude); } db.finishParallelProcessing(); } private String modifyFilenameIfBeta(String fileName) { if (isBeta) { //Beta prefix before the file shortname, but also for the leading directory int lastSlash = fileName.lastIndexOf(File.separator) + 1; fileName = BETA_PREFIX + fileName.substring(0,lastSlash) + BETA_PREFIX + fileName.substring(lastSlash); } return fileName; } private void loadRelationshipHierarchy(File intLoadingArea) throws RF1ConversionException { String fileName = intLoadingArea.getAbsolutePath() + File.separator + "sct2_Relationship_Snapshot_INT_DATE.txt"; fileName = fileName.replace(DATE, intReleaseDate); GraphLoader gl = new GraphLoader (fileName); gl.loadRelationships(); } private Set<QualifyingRelationshipAttribute> loadQualifyingRelationshipRules() { GsonBuilder gsonBuilder = new GsonBuilder(); gsonBuilder.registerTypeAdapter(Concept.class, new ConceptDeserializer()); Gson gson = gsonBuilder.create(); InputStream jsonStream = ConversionManager.class.getResourceAsStream(QUALIFYING_RULES); BufferedReader jsonReader = new BufferedReader(new InputStreamReader(jsonStream)); Type 
listType = new TypeToken<Set<QualifyingRelationshipAttribute>>() {}.getType(); Set<QualifyingRelationshipAttribute> attributes = gson.fromJson(jsonReader, listType); return attributes; } private void loadLateralityIndicators(File lateralityFile) throws RF1ConversionException { try (BufferedReader br = new BufferedReader(new FileReader(lateralityFile))) { String line; boolean firstLine = true; while ((line = br.readLine()) != null) { if (!firstLine) { LateralityIndicator.registerIndicator(line); } else { firstLine = false; } } } catch (IOException ioe) { throw new RF1ConversionException ("Unable to import laterality reference file " + lateralityFile.getAbsolutePath(), ioe); } } private void generateQualifyingRelationships( Set<QualifyingRelationshipAttribute> ruleAttributes, String filePath) throws RF1ConversionException { //For each attribute, work through each rule creating rules for self and all children of starting points, //except for exceptions try(FileWriter fw = new FileWriter(filePath, true); BufferedWriter bw = new BufferedWriter(fw); PrintWriter out = new PrintWriter(bw)) { for (QualifyingRelationshipAttribute thisAttribute : ruleAttributes) { StringBuffer commonRF1 = new StringBuffer().append(FIELD_DELIMITER) .append(thisAttribute.getType().getSctId()).append(FIELD_DELIMITER) .append(thisAttribute.getDestination().getSctId()).append(FIELD_DELIMITER) .append("1\t")//Qualifying Rel type .append(thisAttribute.getRefinability()).append("\t0"); //Refineable, Group 0 for (QualifyingRelationshipRule thisRule : thisAttribute.getRules()) { Set<Concept> potentialApplications = thisRule.getStartPoint().getAllDescendents(Concept.DEPTH_NOT_SET); Collection<Concept> ruleAppliedTo = CollectionUtils.subtract(potentialApplications, thisRule.getExceptions()); for (Concept thisException : thisRule.getExceptions()) { Set<Concept> exceptionDescendents = thisException.getAllDescendents(Concept.DEPTH_NOT_SET); ruleAppliedTo = CollectionUtils.subtract(ruleAppliedTo, 
exceptionDescendents); } //Now the remaining concepts that the rules applies to can be written out to file for (Concept thisConcept : ruleAppliedTo) { //Concept may already have this attribute as a defining relationship, skip if so. if (!thisConcept.hasAttribute(thisAttribute)) { String rf1Line = FIELD_DELIMITER + thisConcept.getSctId() + commonRF1; out.println(rf1Line); } } } } } catch (IOException e) { throw new RF1ConversionException ("Failure while outputting Qualifying Relationships: " + e.toString()); } } private void generateLateralityRelationships(String filePath) throws RF1ConversionException { //Check every concept to see if has a laterality indicator, and doesn't already have that //attribute as a defining relationship Set<Concept> allConcepts = Concept.getConcept(SNOMED_ROOT_CONCEPT).getAllDescendents(Concept.DEPTH_NOT_SET); StringBuffer commonRF1 = new StringBuffer().append(FIELD_DELIMITER) .append(LATERALITY_ATTRIB).append(FIELD_DELIMITER) .append(SIDE_VALUE).append(FIELD_DELIMITER) .append("1\t")//Qualifying Rel type .append(RF1Constants.MUST_REFINE).append("\t0"); //Refineable, Group 0 Concept lat = Concept.getConcept(Long.parseLong(LATERALITY_ATTRIB)); Concept side = Concept.getConcept(Long.parseLong(SIDE_VALUE)); QualifyingRelationshipAttribute LateralityAttribute = new QualifyingRelationshipAttribute (lat, side, RF1Constants.MUST_REFINE); try(FileWriter fw = new FileWriter(filePath, true); BufferedWriter bw = new BufferedWriter(fw); PrintWriter out = new PrintWriter(bw)) { for (Concept thisConcept : allConcepts) { if (LateralityIndicator.hasLateralityIndicator(thisConcept.getSctId(), LateralityIndicator.Lattomidsag.YES)) { if (!thisConcept.hasAttribute(LateralityAttribute)) { String relId = RF1Constants.lookupRelationshipId(thisConcept.getSctId().toString(), LATERALITY_ATTRIB, SIDE_VALUE, UNGROUPED); String rf1Line = relId + FIELD_DELIMITER + thisConcept.getSctId() + commonRF1; out.println(rf1Line); } } } }catch (IOException e){ throw new 
RF1ConversionException ("Failure while output Laterality Relationships: " + e.toString()); } } private String getQualifyingRelationshipFilepath(String releaseDate, EditionConfig editionConfig, File exportArea) throws RF1ConversionException { // Replace DATE in the filename with the actual release date String fileName = RELATIONSHIP_FILENAME.replaceFirst(DATE, releaseDate) .replace(DATE, releaseDate) .replace(OUT, editionConfig.outputName) .replace(LNG, editionConfig.langCode); fileName = modifyFilenameIfBeta(fileName); String filePath = exportArea + File.separator + fileName; File outputFile = new File(filePath); try{ if (!outputFile.exists()) { outputFile.getParentFile().mkdirs(); outputFile.createNewFile(); } } catch (IOException e) { throw new RF1ConversionException("Unable to create file for Qualifying Relationships: " + e); } return filePath; } private void askForLateralityFile() { try (Scanner in = new Scanner(System.in)) { print ("Do you wish to create Lateralized Qualifying Relationships? 
[Y/N]: "); String response = in.nextLine().trim(); if (response.toUpperCase().equals("Y")) { print ("Please provide matching laterality reference file location: "); String latFileLPath = in.nextLine().trim(); File lateralityFile = new File(latFileLPath); if (!lateralityFile.exists()) { print ("File not found: " + latFileLPath); askForLateralityFile(); } else { try{ loadLateralityIndicators(lateralityFile); includeLateralityIndicators = true; } catch (Exception e) { print ("Failed to load Laterality text file due to " + e.getMessage()); askForLateralityFile(); } } } } } private void includeAdditionalFiles(File outputDirectory, String releaseDate, EditionConfig editionConfig){ Map<String, String> targetLocation = new HashMap<String, String>(); targetLocation.put(".pdf", "Documentation/"); targetLocation.put("KeyIndex_", "Resources/Indexes/"); targetLocation.put("Canonical", "Resources/Canonical Table/"); String rootPath = outputDirectory.getAbsolutePath() + File.separator + (isBeta?BETA_PREFIX:"") + outputFolderTemplate + File.separator; rootPath = rootPath.replace(OUT, editionConfig.outputName) .replace(DATE, releaseDate); File[] directoryListing = additionalFilesLocation.listFiles(); if (directoryListing != null) { for (File child : directoryListing) { String childFilename = child.getName(); //Do we know to put this file in a particular location? 
//otherwise path will remain the root path for (String match : targetLocation.keySet()) { if (childFilename.contains(match)) { childFilename = targetLocation.get(match) + childFilename; break; } } //Ensure path exists for where file is being copied to File copiedFile = new File (rootPath + childFilename); copiedFile.getParentFile().mkdirs(); try { FileUtils.copyFile(child, copiedFile); print ("Copied additional file to " + copiedFile.getAbsolutePath()); } catch (IOException e) { print ("Unable to copy additional file " + childFilename + " due to " + e.getMessage()); } } } } private void loadPreviousRF1(EditionConfig config) throws RF1ConversionException { try { ZipInputStream zis = new ZipInputStream(new FileInputStream(previousRF1Location)); ZipEntry ze = zis.getNextEntry(); try { while (ze != null) { if (!ze.isDirectory()) { Path p = Paths.get(ze.getName()); String fileName = p.getFileName().toString(); if (fileName.contains("der1_Subsets")) { updateSubsetIds(zis, config); } else if (fileName.contains("sct1_Relationships")) { //We need to use static methods here so that H2 can access as functions. print ("\nLoading previous RF1 relationships"); RF1Constants.loadPreviousRelationships(zis); } } ze = zis.getNextEntry(); } } finally { zis.closeEntry(); zis.close(); } } catch (IOException e) { throw new RF1ConversionException("Failed to load previous RF1 archive " + previousRF1Location.getName(), e); } } private void updateSubsetIds(ZipInputStream zis, EditionConfig config) throws NumberFormatException, IOException { //This function will also pick up and set the previous subset version Long subsetId = loadSubsetsFile(zis); //Do we need to recover a new set of subsetIds? 
if (maxPreviousSubsetId == null || subsetId > maxPreviousSubsetId) { maxPreviousSubsetId = subsetId; InputStream is = ConversionManager.class.getResourceAsStream(AVAILABLE_SUBSET_IDS); try (BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))){ String line; int subsetIdsSet = 0; subsetIds = new Long[config.dialects.length]; while ((line = br.readLine()) != null && subsetIdsSet < config.dialects.length) { Long thisAvailableSubsetId = Long.parseLong(line.trim()); if (thisAvailableSubsetId.compareTo(maxPreviousSubsetId) > 0) { debug ("Obtaining new Subset Ids from resource file"); subsetIds[subsetIdsSet] = thisAvailableSubsetId; subsetIdsSet++; } } } } } /* * @return the greatest subsetId in the file */ private Long loadSubsetsFile(ZipInputStream zis) throws IOException { Long maxSubsetIdInFile = null; BufferedReader br = new BufferedReader(new InputStreamReader(zis, StandardCharsets.UTF_8)); String line; boolean isFirstLine = true; while ((line = br.readLine()) != null) { if (isFirstLine) { isFirstLine = false; continue; } String[] lineItems = line.split(FIELD_DELIMITER); //SubsetId is the first column Long thisSubsetId = Long.parseLong(lineItems[RF1_IDX_SUBSETID]); if (maxSubsetIdInFile == null || thisSubsetId > maxSubsetIdInFile) { maxSubsetIdInFile = thisSubsetId; } //SubsetVersion is the 3rd int thisSubsetVersion = Integer.parseInt(lineItems[RF1_IDX_SUBSETVERSION]); if (thisSubsetVersion > previousSubsetVersion) { previousSubsetVersion = thisSubsetVersion; } } return maxSubsetIdInFile; } private void useDeterministicSubsetIds(int releaseIndex, EditionConfig config) throws RF1ConversionException { try { InputStream is = ConversionManager.class.getResourceAsStream(AVAILABLE_SUBSET_IDS); try (BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))){ String line; int subsetIdsSet = 0; subsetIds = new Long[config.dialects.length]; int filePos = 0; while ((line = br.readLine()) != null && 
subsetIdsSet < config.dialects.length) { filePos++; Long thisAvailableSubsetId = Long.parseLong(line.trim()); if (filePos >= releaseIndex) { debug ("Obtaining new Subset Ids from resource file"); subsetIds[subsetIdsSet] = thisAvailableSubsetId; subsetIdsSet++; } } } } catch (IOException e) { throw new RF1ConversionException("Unable to determine new subset Ids",e); } } private void setSubsetIds(int newSubsetVersion) { for (int i=0 ; i<subsetIds.length; i++) { db.runStatement("SET @SUBSETID_" + (i+1) + " = " + subsetIds[i]); } db.runStatement("SET @SUBSET_VERSION = " + newSubsetVersion); } int calculateReleaseIndex(String releaseDate) { //returns a number that can be used when a previous release is not available //to give an incrementing variable that we can use to move through the SCTID 02 & 03 files int year = Integer.parseInt(releaseDate.substring(0, 4)); int month = Integer.parseInt(releaseDate.substring(4,6)); int index = ((year - 2016)*10) + month; return index; } }
Only include laterality ids if previous RF1 file is present
src/main/java/org/ihtsdo/snomed/rf2torf1conversion/ConversionManager.java
Only include laterality ids if previous RF1 file is present
Java
apache-2.0
c6ad2b958748719bcbe6afb3d175f9a96d15541c
0
TomasHofman/undertow,msfm/undertow,amannm/undertow,wildfly-security-incubator/undertow,marschall/undertow,golovnin/undertow,marschall/undertow,n1hility/undertow,jstourac/undertow,yonglehou/undertow,soul2zimate/undertow,baranowb/undertow,baranowb/undertow,jasonchaffee/undertow,undertow-io/undertow,marschall/undertow,jamezp/undertow,aldaris/undertow,stuartwdouglas/undertow,jstourac/undertow,jstourac/undertow,n1hility/undertow,amannm/undertow,aldaris/undertow,grassjedi/undertow,baranowb/undertow,msfm/undertow,pedroigor/undertow,grassjedi/undertow,stuartwdouglas/undertow,amannm/undertow,golovnin/undertow,golovnin/undertow,stuartwdouglas/undertow,rhatlapa/undertow,nkhuyu/undertow,nkhuyu/undertow,aradchykov/undertow,rogerchina/undertow,grassjedi/undertow,pferraro/undertow,rhusar/undertow,yonglehou/undertow,n1hility/undertow,ctomc/undertow,undertow-io/undertow,rhatlapa/undertow,rhusar/undertow,Karm/undertow,biddyweb/undertow,msfm/undertow,soul2zimate/undertow,darranl/undertow,jamezp/undertow,aradchykov/undertow,darranl/undertow,aradchykov/undertow,pedroigor/undertow,aldaris/undertow,biddyweb/undertow,Karm/undertow,jamezp/undertow,rhusar/undertow,TomasHofman/undertow,popstr/undertow,soul2zimate/undertow,rhatlapa/undertow,ctomc/undertow,ctomc/undertow,darranl/undertow,nkhuyu/undertow,undertow-io/undertow,wildfly-security-incubator/undertow,jasonchaffee/undertow,pferraro/undertow,popstr/undertow,pedroigor/undertow,yonglehou/undertow,Karm/undertow,TomasHofman/undertow,popstr/undertow,rogerchina/undertow,wildfly-security-incubator/undertow,pferraro/undertow,biddyweb/undertow,jasonchaffee/undertow,rogerchina/undertow
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.session; import java.util.ArrayList; import java.util.List; import java.util.ListIterator; import java.util.concurrent.CopyOnWriteArrayList; import io.undertow.server.HttpServerExchange; /** * Utility class that maintains the session listeners. * * * @author Stuart Douglas */ public class SessionListeners { private final List<SessionListener> sessionListeners = new CopyOnWriteArrayList<>(); public void addSessionListener(final SessionListener listener) { this.sessionListeners.add(listener); } public boolean removeSessionListener(final SessionListener listener) { return this.sessionListeners.remove(listener); } public void clear() { this.sessionListeners.clear(); } public void sessionCreated(final Session session, final HttpServerExchange exchange) { for (SessionListener listener : sessionListeners) { listener.sessionCreated(session, exchange); } } public void sessionDestroyed(final Session session, final HttpServerExchange exchange, SessionListener.SessionDestroyedReason reason) { // We need to create our own snapshot to safely iterate over a concurrent list in reverse List<SessionListener> listeners = new ArrayList<>(sessionListeners); ListIterator<SessionListener> iterator = listeners.listIterator(listeners.size()); while (iterator.hasPrevious()) { 
iterator.previous().sessionDestroyed(session, exchange, reason); } } public void attributeAdded(final Session session, final String name, final Object value) { for (SessionListener listener : sessionListeners) { listener.attributeAdded(session, name, value); } } public void attributeUpdated(final Session session, final String name, final Object newValue, final Object oldValue) { for (SessionListener listener : sessionListeners) { listener.attributeUpdated(session, name, newValue, oldValue); } } public void attributeRemoved(final Session session, final String name, final Object oldValue) { for (SessionListener listener : sessionListeners) { listener.attributeRemoved(session, name, oldValue); } } public void sessionIdChanged(final Session session, final String oldSessionId) { for (SessionListener listener : sessionListeners) { listener.sessionIdChanged(session, oldSessionId); } } }
core/src/main/java/io/undertow/server/session/SessionListeners.java
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.session; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.ListIterator; import java.util.concurrent.CopyOnWriteArrayList; import io.undertow.server.HttpServerExchange; /** * Utility class that maintains the session listeners. * * * @author Stuart Douglas */ public class SessionListeners { private final List<SessionListener> sessionListeners = new CopyOnWriteArrayList<>(); public void addSessionListener(final SessionListener listener) { this.sessionListeners.add(listener); } public boolean removeSessionListener(final SessionListener listener) { return this.sessionListeners.remove(listener); } public void clear() { this.sessionListeners.clear(); } public void sessionCreated(final Session session, final HttpServerExchange exchange) { for (SessionListener listener : sessionListeners) { listener.sessionCreated(session, exchange); } } public void sessionDestroyed(final Session session, final HttpServerExchange exchange, SessionListener.SessionDestroyedReason reason) { // We need to create our own snapshot to safely iterate over a concurrent list in reverse List<SessionListener> listeners = new ArrayList<>(sessionListeners); ListIterator<SessionListener> iterator = listeners.listIterator(listeners.size()); while 
(iterator.hasPrevious()) { iterator.previous().sessionDestroyed(session, exchange, reason); } } public void attributeAdded(final Session session, final String name, final Object value) { for (SessionListener listener : sessionListeners) { listener.attributeAdded(session, name, value); } } public void attributeUpdated(final Session session, final String name, final Object newValue, final Object oldValue) { for (SessionListener listener : sessionListeners) { listener.attributeUpdated(session, name, newValue, oldValue); } } public void attributeRemoved(final Session session, final String name, final Object oldValue) { for (SessionListener listener : sessionListeners) { listener.attributeRemoved(session, name, oldValue); } } public void sessionIdChanged(final Session session, final String oldSessionId) { for (SessionListener listener : sessionListeners) { listener.sessionIdChanged(session, oldSessionId); } } }
Checkstyle
core/src/main/java/io/undertow/server/session/SessionListeners.java
Checkstyle
Java
apache-2.0
e91f61e57467867a4f4d45ed4b029a51ab343914
0
hakeemsm/hadoop-ball,hakeemsm/hadoop-ball,hakeemsm/hadoop-ball
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import java.io.IOException; import java.util.StringTokenizer; // >>> Don't Change public class OrphanPages extends Configured implements Tool { public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new OrphanPages(), args); System.exit(res); } // <<< Don't Change @Override public int run(String[] args) throws Exception { //TODO Job job = Job.getInstance(this.getConf(), "Orphan Pages"); job.setOutputKeyClass(IntWritable.class); job.setOutputValueClass(IntWritable.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); job.setMapperClass(LinkCountMap.class); job.setReducerClass(OrphanPageReduce.class); FileInputFormat.setInputPaths(job,new Path(args[0])); FileOutputFormat.setOutputPath(job,new Path(args[1])); job.setJarByClass(OrphanPages.class); return job.waitForCompletion(true) ? 
0 : 1; } public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> { @Override public void map(Object key, Text value, Context context) throws IOException, InterruptedException { //TODO String entry = value.toString(); String[] pair = entry.split(":",2); String[] srcLinks = pair[1].split(" "); for (String str: srcLinks) { context.write(new IntWritable(Integer.parseInt(pair[0])), new IntWritable(Integer.parseInt(str))); } } } public static class OrphanPageReduce extends Reducer<IntWritable, IntWritable, IntWritable, NullWritable> { @Override public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { //TODO context.write(key); } } }
OrphanPages.java
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import java.io.IOException; import java.util.StringTokenizer; // >>> Don't Change public class OrphanPages extends Configured implements Tool { public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new OrphanPages(), args); System.exit(res); } // <<< Don't Change @Override public int run(String[] args) throws Exception { //TODO } public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> { @Override public void map(Object key, Text value, Context context) throws IOException, InterruptedException { //TODO } } public static class OrphanPageReduce extends Reducer<IntWritable, IntWritable, IntWritable, NullWritable> { @Override public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { //TODO } } }
orphan pages code
OrphanPages.java
orphan pages code
Java
apache-2.0
f16acafa213a2847c6ff65ea998c97a7008f996a
0
nihrom205/java-a-to-z,nihrom205/java-a-to-z
package ru.job4j.tree; import java.util.ArrayList; import java.util.List; /** * Class Node. * * @author Alexey Rastorguev ([email protected]) * @version 0.1 * @since 28.12.2017 */ public class Node<E> { private final List<Node<E>> children = new ArrayList<>(); private final E value; public Node(final E value) { this.value = value; } public void add(Node<E> child) { this.children.add(child); } public List<Node<E>> leaves() { return this.children; } public boolean eqValue(E that) { return this.value.equals(that); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Node<?> node = (Node<?>) o; return value != null ? value.equals(node.value) : node.value == null; } @Override public int hashCode() { int result = 17; result = 31 * result + (value != null ? value.hashCode() : 0); return result; } }
chapter_005/src/main/java/ru/job4j/tree/Node.java
package ru.job4j.tree; import java.util.ArrayList; import java.util.List; /** * Class Node. * * @author Alexey Rastorguev ([email protected]) * @version 0.1 * @since 28.12.2017 */ public class Node<E> { private final List<Node<E>> children = new ArrayList<>(); private final E value; public Node(final E value) { this.value = value; } public void add(Node<E> child) { this.children.add(child); } public List<Node<E>> leaves() { return this.children; } public boolean eqValue(E that) { return this.value.equals(that); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Node<?> node = (Node<?>) o; return value != null ? value.equals(node.value) : node.value == null; } @Override public int hashCode() { int result = 17; result = 31 * result + (value != null ? value.hashCode() : 0); return result; } }
modify Node, package tree.
chapter_005/src/main/java/ru/job4j/tree/Node.java
modify Node, package tree.
Java
apache-2.0
d0f70d4642c036b0fcb57d01d0a340878161801e
0
APriestman/autopsy,rcordovano/autopsy,rcordovano/autopsy,wschaeferB/autopsy,APriestman/autopsy,rcordovano/autopsy,dgrove727/autopsy,APriestman/autopsy,millmanorama/autopsy,esaunders/autopsy,wschaeferB/autopsy,millmanorama/autopsy,narfindustries/autopsy,wschaeferB/autopsy,millmanorama/autopsy,APriestman/autopsy,APriestman/autopsy,esaunders/autopsy,APriestman/autopsy,dgrove727/autopsy,dgrove727/autopsy,wschaeferB/autopsy,esaunders/autopsy,esaunders/autopsy,narfindustries/autopsy,rcordovano/autopsy,millmanorama/autopsy,rcordovano/autopsy,esaunders/autopsy,APriestman/autopsy,rcordovano/autopsy,narfindustries/autopsy,wschaeferB/autopsy
/* * Autopsy Forensic Browser * * Copyright 2011-2015 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.datamodel; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Observable; import java.util.Observer; import java.util.Set; import java.util.logging.Level; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; import org.openide.util.NbBundle; import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskException; /** * Hash set hits node support. Inner classes have all of the nodes in the tree. 
*/ public class HashsetHits implements AutopsyVisitableItem { private static final String HASHSET_HITS = BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getLabel(); private static final String DISPLAY_NAME = BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName(); private static final Logger logger = Logger.getLogger(HashsetHits.class.getName()); private SleuthkitCase skCase; private final HashsetResults hashsetResults; public HashsetHits(SleuthkitCase skCase) { this.skCase = skCase; hashsetResults = new HashsetResults(); } @Override public <T> T accept(AutopsyItemVisitor<T> v) { return v.visit(this); } /** * Stores all of the hashset results in a single class that is observable * for the child nodes */ private class HashsetResults extends Observable { // maps hashset name to list of artifacts for that set // NOTE: "hashSetHitsMap" object can be accessed by multiple threads and needs to be synchronized private final Map<String, Set<Long>> hashSetHitsMap = new LinkedHashMap<>(); HashsetResults() { update(); } List<String> getSetNames() { List<String> names; synchronized (hashSetHitsMap) { names = new ArrayList<>(hashSetHitsMap.keySet()); } Collections.sort(names); return names; } Set<Long> getArtifactIds(String hashSetName) { synchronized (hashSetHitsMap) { return hashSetHitsMap.get(hashSetName); } } @SuppressWarnings("deprecation") final void update() { synchronized (hashSetHitsMap) { hashSetHitsMap.clear(); if (skCase == null) { return; } int setNameId = ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(); int artId = ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID(); String query = "SELECT value_text,blackboard_attributes.artifact_id,attribute_type_id " //NON-NLS + "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS + "attribute_type_id=" + setNameId //NON-NLS + " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS + " AND blackboard_artifacts.artifact_type_id=" + artId; //NON-NLS try (CaseDbQuery dbQuery = 
skCase.executeQuery(query)) { ResultSet resultSet = dbQuery.getResultSet(); while (resultSet.next()) { String setName = resultSet.getString("value_text"); //NON-NLS long artifactId = resultSet.getLong("artifact_id"); //NON-NLS if (!hashSetHitsMap.containsKey(setName)) { hashSetHitsMap.put(setName, new HashSet<Long>()); } hashSetHitsMap.get(setName).add(artifactId); } } catch (TskCoreException | SQLException ex) { logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS } setChanged(); notifyObservers(); } } } /** * Top-level node for all hash sets */ public class RootNode extends DisplayableItemNode { public RootNode() { super(Children.create(new HashsetNameFactory(), true), Lookups.singleton(DISPLAY_NAME)); super.setName(HASHSET_HITS); super.setDisplayName(DISPLAY_NAME); this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hashset_hits.png"); //NON-NLS } @Override public boolean isLeafTypeNode() { return false; } @Override public <T> T accept(DisplayableItemNodeVisitor<T> v) { return v.visit(this); } @Override protected Sheet createSheet() { Sheet s = super.createSheet(); Sheet.Set ss = s.get(Sheet.PROPERTIES); if (ss == null) { ss = Sheet.createPropertiesSet(); s.put(ss); } ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.name"), NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.desc"), getName())); return s; } /* * TODO (AUT-1849): Correct or remove peristent column reordering code * * Added to support this feature. 
*/ // @Override // public String getItemType() { // return "HashsetRoot"; //NON-NLS // } } /** * Creates child nodes for each hashset name */ private class HashsetNameFactory extends ChildFactory.Detachable<String> implements Observer { /* * This should probably be in the HashsetHits class, but the factory has * nice methods for its startup and shutdown, so it seemed like a * cleaner place to register the property change listener. */ private final PropertyChangeListener pcl = new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { String eventType = evt.getPropertyName(); if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { /** * Checking for a current case is a stop gap measure until a * different way of handling the closing of cases is worked * out. Currently, remote events may be received for a case * that is already closed. */ try { Case.getCurrentCase(); /** * Due to some unresolved issues with how cases are * closed, it is possible for the event to have a null * oldValue if the event is a remote event. */ ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) { hashsetResults.update(); } } catch (IllegalStateException notUsed) { /** * Case is closed, do nothing. */ } } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { /** * Checking for a current case is a stop gap measure until a * different way of handling the closing of cases is worked * out. Currently, remote events may be received for a case * that is already closed. */ try { Case.getCurrentCase(); hashsetResults.update(); } catch (IllegalStateException notUsed) { /** * Case is closed, do nothing. */ } } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) { // case was closed. 
Remove listeners so that we don't get called with a stale case handle if (evt.getNewValue() == null) { removeNotify(); skCase = null; } } } }; @Override protected void addNotify() { IngestManager.getInstance().addIngestJobEventListener(pcl); IngestManager.getInstance().addIngestModuleEventListener(pcl); Case.addPropertyChangeListener(pcl); hashsetResults.update(); hashsetResults.addObserver(this); } @Override protected void removeNotify() { IngestManager.getInstance().removeIngestJobEventListener(pcl); IngestManager.getInstance().removeIngestModuleEventListener(pcl); Case.removePropertyChangeListener(pcl); hashsetResults.deleteObserver(this); } @Override protected boolean createKeys(List<String> list) { list.addAll(hashsetResults.getSetNames()); return true; } @Override protected Node createNodeForKey(String key) { return new HashsetNameNode(key); } @Override public void update(Observable o, Object arg) { refresh(true); } } /** * Node for a hash set name */ public class HashsetNameNode extends DisplayableItemNode implements Observer { private final String hashSetName; public HashsetNameNode(String hashSetName) { super(Children.create(new HitFactory(hashSetName), true), Lookups.singleton(hashSetName)); super.setName(hashSetName); this.hashSetName = hashSetName; updateDisplayName(); this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hashset_hits.png"); //NON-NLS hashsetResults.addObserver(this); } /** * Update the count in the display name */ private void updateDisplayName() { super.setDisplayName(hashSetName + " (" + hashsetResults.getArtifactIds(hashSetName).size() + ")"); } @Override public boolean isLeafTypeNode() { return true; } @Override protected Sheet createSheet() { Sheet s = super.createSheet(); Sheet.Set ss = s.get(Sheet.PROPERTIES); if (ss == null) { ss = Sheet.createPropertiesSet(); s.put(ss); } ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.name"), NbBundle.getMessage(this.getClass(), 
"HashsetHits.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.desc"), getName())); return s; } @Override public <T> T accept(DisplayableItemNodeVisitor<T> v) { return v.visit(this); } @Override public void update(Observable o, Object arg) { updateDisplayName(); } /* * TODO (AUT-1849): Correct or remove peristent column reordering code * * Added to support this feature. */ // @Override // public String getItemType() { // return "HashsetName"; //NON-NLS // } } /** * Creates the nodes for the hits in a given set. */ private class HitFactory extends ChildFactory.Detachable<Long> implements Observer { private String hashsetName; private HitFactory(String hashsetName) { super(); this.hashsetName = hashsetName; } @Override protected void addNotify() { hashsetResults.addObserver(this); } @Override protected void removeNotify() { hashsetResults.deleteObserver(this); } @Override protected boolean createKeys(List<Long> list) { list.addAll(hashsetResults.getArtifactIds(hashsetName)); return true; } @Override protected Node createNodeForKey(Long id) { if (skCase == null) { return null; } try { BlackboardArtifact art = skCase.getBlackboardArtifact(id); return new BlackboardArtifactNode(art); } catch (TskException ex) { logger.log(Level.WARNING, "TSK Exception occurred", ex); //NON-NLS } return null; } @Override public void update(Observable o, Object arg) { refresh(true); } } }
Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java
/* * Autopsy Forensic Browser * * Copyright 2011-2015 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.datamodel; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Observable; import java.util.Observer; import java.util.Set; import java.util.logging.Level; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; import org.openide.util.NbBundle; import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskException; /** * Hash set hits node support. Inner classes have all of the nodes in the tree. 
*/ public class HashsetHits implements AutopsyVisitableItem { private static final String HASHSET_HITS = BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getLabel(); private static final String DISPLAY_NAME = BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName(); private static final Logger logger = Logger.getLogger(HashsetHits.class.getName()); private SleuthkitCase skCase; private final HashsetResults hashsetResults; public HashsetHits(SleuthkitCase skCase) { this.skCase = skCase; hashsetResults = new HashsetResults(); } @Override public <T> T accept(AutopsyItemVisitor<T> v) { return v.visit(this); } /** * Stores all of the hashset results in a single class that is observable * for the child nodes */ private class HashsetResults extends Observable { // maps hashset name to list of artifacts for that set private final Map<String, Set<Long>> hashSetHitsMap = new LinkedHashMap<>(); HashsetResults() { update(); } List<String> getSetNames() { List<String> names = new ArrayList<>(hashSetHitsMap.keySet()); Collections.sort(names); return names; } Set<Long> getArtifactIds(String hashSetName) { return hashSetHitsMap.get(hashSetName); } @SuppressWarnings("deprecation") final void update() { hashSetHitsMap.clear(); if (skCase == null) { return; } int setNameId = ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(); int artId = ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID(); String query = "SELECT value_text,blackboard_attributes.artifact_id,attribute_type_id " //NON-NLS + "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS + "attribute_type_id=" + setNameId //NON-NLS + " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS + " AND blackboard_artifacts.artifact_type_id=" + artId; //NON-NLS try (CaseDbQuery dbQuery = skCase.executeQuery(query)) { ResultSet resultSet = dbQuery.getResultSet(); while (resultSet.next()) { String setName = resultSet.getString("value_text"); //NON-NLS long artifactId = resultSet.getLong("artifact_id"); 
//NON-NLS if (!hashSetHitsMap.containsKey(setName)) { hashSetHitsMap.put(setName, new HashSet<Long>()); } hashSetHitsMap.get(setName).add(artifactId); } } catch (TskCoreException | SQLException ex) { logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS } setChanged(); notifyObservers(); } } /** * Top-level node for all hash sets */ public class RootNode extends DisplayableItemNode { public RootNode() { super(Children.create(new HashsetNameFactory(), true), Lookups.singleton(DISPLAY_NAME)); super.setName(HASHSET_HITS); super.setDisplayName(DISPLAY_NAME); this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hashset_hits.png"); //NON-NLS } @Override public boolean isLeafTypeNode() { return false; } @Override public <T> T accept(DisplayableItemNodeVisitor<T> v) { return v.visit(this); } @Override protected Sheet createSheet() { Sheet s = super.createSheet(); Sheet.Set ss = s.get(Sheet.PROPERTIES); if (ss == null) { ss = Sheet.createPropertiesSet(); s.put(ss); } ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.name"), NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.desc"), getName())); return s; } /* * TODO (AUT-1849): Correct or remove peristent column reordering code * * Added to support this feature. */ // @Override // public String getItemType() { // return "HashsetRoot"; //NON-NLS // } } /** * Creates child nodes for each hashset name */ private class HashsetNameFactory extends ChildFactory.Detachable<String> implements Observer { /* * This should probably be in the HashsetHits class, but the factory has * nice methods for its startup and shutdown, so it seemed like a * cleaner place to register the property change listener. 
*/ private final PropertyChangeListener pcl = new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { String eventType = evt.getPropertyName(); if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { /** * Checking for a current case is a stop gap measure until a * different way of handling the closing of cases is worked * out. Currently, remote events may be received for a case * that is already closed. */ try { Case.getCurrentCase(); /** * Due to some unresolved issues with how cases are * closed, it is possible for the event to have a null * oldValue if the event is a remote event. */ ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) { hashsetResults.update(); } } catch (IllegalStateException notUsed) { /** * Case is closed, do nothing. */ } } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { /** * Checking for a current case is a stop gap measure until a * different way of handling the closing of cases is worked * out. Currently, remote events may be received for a case * that is already closed. */ try { Case.getCurrentCase(); hashsetResults.update(); } catch (IllegalStateException notUsed) { /** * Case is closed, do nothing. */ } } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) { // case was closed. 
Remove listeners so that we don't get called with a stale case handle if (evt.getNewValue() == null) { removeNotify(); skCase = null; } } } }; @Override protected void addNotify() { IngestManager.getInstance().addIngestJobEventListener(pcl); IngestManager.getInstance().addIngestModuleEventListener(pcl); Case.addPropertyChangeListener(pcl); hashsetResults.update(); hashsetResults.addObserver(this); } @Override protected void removeNotify() { IngestManager.getInstance().removeIngestJobEventListener(pcl); IngestManager.getInstance().removeIngestModuleEventListener(pcl); Case.removePropertyChangeListener(pcl); hashsetResults.deleteObserver(this); } @Override protected boolean createKeys(List<String> list) { list.addAll(hashsetResults.getSetNames()); return true; } @Override protected Node createNodeForKey(String key) { return new HashsetNameNode(key); } @Override public void update(Observable o, Object arg) { refresh(true); } } /** * Node for a hash set name */ public class HashsetNameNode extends DisplayableItemNode implements Observer { private final String hashSetName; public HashsetNameNode(String hashSetName) { super(Children.create(new HitFactory(hashSetName), true), Lookups.singleton(hashSetName)); super.setName(hashSetName); this.hashSetName = hashSetName; updateDisplayName(); this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hashset_hits.png"); //NON-NLS hashsetResults.addObserver(this); } /** * Update the count in the display name */ private void updateDisplayName() { super.setDisplayName(hashSetName + " (" + hashsetResults.getArtifactIds(hashSetName).size() + ")"); } @Override public boolean isLeafTypeNode() { return true; } @Override protected Sheet createSheet() { Sheet s = super.createSheet(); Sheet.Set ss = s.get(Sheet.PROPERTIES); if (ss == null) { ss = Sheet.createPropertiesSet(); s.put(ss); } ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.name"), NbBundle.getMessage(this.getClass(), 
"HashsetHits.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.desc"), getName())); return s; } @Override public <T> T accept(DisplayableItemNodeVisitor<T> v) { return v.visit(this); } @Override public void update(Observable o, Object arg) { updateDisplayName(); } /* * TODO (AUT-1849): Correct or remove peristent column reordering code * * Added to support this feature. */ // @Override // public String getItemType() { // return "HashsetName"; //NON-NLS // } } /** * Creates the nodes for the hits in a given set. */ private class HitFactory extends ChildFactory.Detachable<Long> implements Observer { private String hashsetName; private HitFactory(String hashsetName) { super(); this.hashsetName = hashsetName; } @Override protected void addNotify() { hashsetResults.addObserver(this); } @Override protected void removeNotify() { hashsetResults.deleteObserver(this); } @Override protected boolean createKeys(List<Long> list) { list.addAll(hashsetResults.getArtifactIds(hashsetName)); return true; } @Override protected Node createNodeForKey(Long id) { if (skCase == null) { return null; } try { BlackboardArtifact art = skCase.getBlackboardArtifact(id); return new BlackboardArtifactNode(art); } catch (TskException ex) { logger.log(Level.WARNING, "TSK Exception occurred", ex); //NON-NLS } return null; } @Override public void update(Observable o, Object arg) { refresh(true); } } }
Made HashsetHits class thread safe
Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java
Made HashsetHits class thread safe
Java
apache-2.0
56cc28fca32c75e57ffd6f33de2cca9dc08600bf
0
usc-isi-i2/dig-similarity
package edu.isi.dig.elasticsearch; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.Set; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import net.sf.json.JSONSerializer; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.SearchHit; //import org.slf4j.Logger; //import org.slf4j.LoggerFactory; public class ElasticSearchHandler { final static String BODY_PART="hasBodyPart"; final static String TEXT = "text"; final static String fileName = "config.properties"; final static String IMAGE_CACHE_URL = "hasImagePart.cacheUrl"; final static String SIMILAR_IMAGES = "similar_images_feature"; final static String FEATURE_VALUE_LABEL = "featureValue"; final static String FEATURE_NAME_LABEL = "featureName"; final static String FEATURE_NAME = "similarimageurl"; final static String HAS_FEATURE_COLLECTION = "hasFeatureCollection"; final static String HAS_IMAGE_PART = "hasImagePart"; final static String URI = "uri"; final static String FEATURE_OBJECT = "featureObject"; final static String IMAGE_OBJECT_URIS = "imageObjectUris"; final static String CACHE_URL = "cacheUrl"; static Client esClient=null; static TransportClient ts =null; static SearchResponse searchResp = null; static Properties prop=null; static String indexName=""; static String docType=""; static String environment=""; static String returnPort = 
"9200"; static String elasticsearchHost=""; static Settings settings = null; // private static Logger LOG = LoggerFactory.getLogger(ElasticSearchHandler.class); public static void Initialize(){ prop = new Properties(); InputStream input = ElasticSearchHandler.class.getClassLoader().getResourceAsStream(fileName); try{ prop.load(input); elasticsearchHost=prop.getProperty("elasticsearchHost"); settings = ImmutableSettings.settingsBuilder() .put(prop.getProperty("clusterNameProperty"), prop.getProperty("clusterName")).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(elasticsearchHost, Integer.parseInt(prop.getProperty("elasticsearchPort")))); indexName = prop.getProperty("indexName"); docType = prop.getProperty("docType"); environment = prop.getProperty("environment"); if(environment.equals("production")){ returnPort = prop.getProperty("nginxPort"); } }catch(IOException ioe){ ioe.printStackTrace(); } } public static String PerformSimpleSearch(String uri){ try{ Initialize(); TermQueryBuilder termQB = QueryBuilders.termQuery(URI, uri); SearchResponse searchResp = esClient.prepareSearch(indexName) .setTypes(docType) .setQuery(termQB) .execute() .actionGet(); SearchHit[] searchHit = searchResp.getHits().getHits(); if(searchHit.length == 1){ return searchHit[0].getSourceAsString(); } return null; }catch(Exception e){ return e.toString(); } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } private static Map<String,Object> collectFeatures(String jsonWebPage){ //AS per our current standards, should be a JSON object JSONObject jSource = (JSONObject) JSONSerializer.toJSON(jsonWebPage); Map<String, Object> mapFeatureCollection = new HashMap<String,Object>(); if(jSource.containsKey("hasFeatureCollection")){ JSONObject jFeatureCollection = jSource.getJSONObject("hasFeatureCollection"); @SuppressWarnings("unchecked") Set<String> keys = jFeatureCollection.keySet(); for(String key : keys){ Object 
jFeature = jFeatureCollection.get(key); if(jFeature instanceof JSONObject){ if(((JSONObject) jFeature).containsKey("featureName") && ((JSONObject) jFeature).containsKey("featureValue")){ mapFeatureCollection.put(((JSONObject) jFeature).getString("featureName"), ((JSONObject) jFeature).getString("featureValue")); } }else if(jFeature instanceof JSONArray){ JSONArray JFeatures = (JSONArray) jFeature; //if its an array, it has multiple values for the feature, ArrayList<String> multipleValues = new ArrayList<String>(); if(JFeatures.size() > 0){ for(int i=0; i < JFeatures.size();i++){ JSONObject jTemp = JFeatures.getJSONObject(i); if(((JSONObject) jTemp).containsKey("featureName") && ((JSONObject) jTemp).containsKey("featureValue")){ multipleValues.add(((JSONObject) jTemp).getString("featureValue")); } } //get the featureName,multipleValues map mapFeatureCollection.put(JFeatures.getJSONObject(0).getString("featureName"), multipleValues); } } } } return mapFeatureCollection; } public static String FindSimilar(String uri,String sendBack){ try{ String searchSourceJson = PerformSimpleSearch(uri); Map<String,Object> mapSourceFeatures = new HashMap<String, Object>(); if(searchSourceJson!=null){ JSONObject jSourceObj = (JSONObject) JSONSerializer.toJSON(searchSourceJson); if(jSourceObj.containsKey(BODY_PART)){ JSONObject jBodyPart = (JSONObject) jSourceObj.get(BODY_PART); if(jBodyPart.containsKey(TEXT)){ String bodyText = jBodyPart.getString(TEXT); //Create a map of <featureValues,featureNames> mapSourceFeatures = collectFeatures(searchSourceJson); //ToDo: Make this query better MoreLikeThisQueryBuilder qb = QueryBuilders.moreLikeThisQuery("hasBodyPart.text.shingle_4") .likeText(bodyText) .minTermFreq(1) .maxQueryTerms(20); Initialize(); searchResp = esClient.prepareSearch(indexName) .setTypes(docType) .setQuery(qb) .execute() .actionGet(); SearchHit[] searchHit = searchResp.getHits().getHits(); JSONObject jParentObj= new JSONObject(); JSONArray jArray = new JSONArray(); 
for(SearchHit sh : searchHit){ JSONArray additionalFeatures = new JSONArray(); JSONArray missingFeatures = new JSONArray(); JSONArray differentValuedFeatures = new JSONArray(); JSONObject similarWebPageResult = new JSONObject(); JSONObject jSimilarWebPage = (JSONObject)JSONSerializer.toJSON(sh.getSourceAsString()); Map<String,Object> mapSimilarWPFC = collectFeatures(sh.getSourceAsString()); Set<String> similarWebPagekeys =mapSimilarWPFC.keySet(); for(String key : similarWebPagekeys){ if(mapSourceFeatures.containsKey(key)){ Object sourceFeatureValue = mapSourceFeatures.get(key); Object targetFeatureValue = mapSimilarWPFC.get(key); if(sourceFeatureValue instanceof String && targetFeatureValue instanceof String){ if(!(String.valueOf(sourceFeatureValue).equals(String.valueOf(targetFeatureValue)))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, String.valueOf(targetFeatureValue)); differentValuedFeatures.add(jTemp.toString()); } } else if(sourceFeatureValue instanceof ArrayList<?> && targetFeatureValue instanceof ArrayList<?>){ for(int i=0;i < ((ArrayList<?>) targetFeatureValue).size();i++){ if(!((ArrayList<?>) sourceFeatureValue).contains(((ArrayList<?>) targetFeatureValue).get(i))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, ((ArrayList<?>) targetFeatureValue).get(i)); differentValuedFeatures.add(jTemp.toString()); } } } else{//one is string and other is ArrayList if(sourceFeatureValue instanceof String){//source string, target ArrayList for(int i=0;i < ((ArrayList<?>) targetFeatureValue).size();i++){ if(!((ArrayList<?>) targetFeatureValue).get(i).equals(String.valueOf(sourceFeatureValue))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, ((ArrayList<?>) targetFeatureValue).get(i)); differentValuedFeatures.add(jTemp.toString()); } } }else{//Source - ArrayList , target - String if(!((ArrayList<?>) sourceFeatureValue).contains(String.valueOf(targetFeatureValue))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, 
String.valueOf(targetFeatureValue)); differentValuedFeatures.add(jTemp.toString()); } } } } else{ //additional features, not present in the source json but in similar json objects JSONObject jTemp = new JSONObject(); Object value = mapSimilarWPFC.get(key); if(value instanceof ArrayList<?>){ for(int i=0;i< ((ArrayList<?>) value).size();i++){ jTemp.accumulate(key, ((ArrayList<?>) value).get(i)); } } else{ jTemp.accumulate(key, value); } additionalFeatures.add(jTemp.toString()); } } Set<String> sourceKeys = mapSourceFeatures.keySet(); for(String key: sourceKeys){ if(!mapSimilarWPFC.containsKey(key)){ JSONObject jTemp = new JSONObject(); Object value = mapSourceFeatures.get(key); if(value instanceof ArrayList<?>){ for(int i=0;i< ((ArrayList<?>) value).size();i++){ jTemp.accumulate(key, ((ArrayList<?>) value).get(i)); } } else{ jTemp.accumulate(key, value); } missingFeatures.add(jTemp.toString()); } } String resultURI = jSimilarWebPage.getString(URI); if(sendBack.equalsIgnoreCase("all")){ similarWebPageResult.accumulate("similarWebPage",jSimilarWebPage); } else{ JSONObject jSimilarBodyPart = jSimilarWebPage.getJSONObject(BODY_PART); similarWebPageResult.accumulate(URI,resultURI); similarWebPageResult.accumulate(BODY_PART + "."+ TEXT, jSimilarBodyPart.getString(TEXT)); } if(additionalFeatures.size() > 0){ similarWebPageResult.accumulate("additionalFeatures", additionalFeatures); } if(differentValuedFeatures.size()>0){ similarWebPageResult.accumulate("featuresWithDifferentValues",differentValuedFeatures); } if(missingFeatures.size() > 0){ similarWebPageResult.accumulate("missingFeatures",missingFeatures); } jArray.add(new JSONObject().accumulate("similarWebPage",similarWebPageResult)); } jParentObj.accumulate("similar", jArray); return jParentObj.toString(); } else{ throw new Exception("hasBodyPart for URI: " + uri + " doesnot contain field 'text'"); } } else{ throw new Exception("Json Object for URI: " + uri + " doesnot contain 'hasBodyPart'"); } } else{ throw new 
Exception("No WebPage found for uri: " + uri); } }catch(Exception e){ return e.toString(); } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } public static JSONObject addSimilarImagesFeature(JSONObject source, String queryURI,String matchedImageCacheURI) { if(source.containsKey(HAS_FEATURE_COLLECTION)){ JSONObject jHFC = source.getJSONObject("hasFeatureCollection");//Assumption: it is a json object. I would be surprised if it isnt boolean containsFeatureObject = false; JSONObject jObjFeatureObject = new JSONObject(); if(jHFC.containsKey(SIMILAR_IMAGES)){ JSONArray jSimImages = jHFC.getJSONArray(SIMILAR_IMAGES); boolean containsURI = false; for(int i=0;i<jSimImages.size();i++){ if(jSimImages.getJSONObject(i).containsKey(FEATURE_OBJECT)){ containsFeatureObject = true; jObjFeatureObject = jSimImages.getJSONObject(i).getJSONObject(FEATURE_OBJECT); } else { JSONObject jSimImage = jSimImages.getJSONObject(i); if(jSimImage.getString(FEATURE_VALUE_LABEL).equals(queryURI)){ containsURI = true; } } } if(!containsURI){ jSimImages.add(accumulateSimilarImageFeature(queryURI)); } if(containsFeatureObject){ //check if the uri being added is not already in there JSONArray jArrayImageURIs = jObjFeatureObject.getJSONArray(IMAGE_OBJECT_URIS); Object ObjImagePart = source.get(HAS_IMAGE_PART); if(ObjImagePart instanceof JSONArray){ JSONArray jArrayImagePart = (JSONArray) ObjImagePart; for(int j=0;j<jArrayImagePart.size();j++){ JSONObject jObjImage = jArrayImagePart.getJSONObject(j); if(jObjImage.getString(CACHE_URL).equals(matchedImageCacheURI)){ if(!jArrayImageURIs.contains(jObjImage.getString(URI))){ jArrayImageURIs.add(jObjImage.getString(URI)); } } } } else if(ObjImagePart instanceof JSONObject){ JSONObject jObjImage = (JSONObject) ObjImagePart; if(jObjImage.getString(CACHE_URL).equals(matchedImageCacheURI)){ if(!jArrayImageURIs.contains(jObjImage.getString(URI))){ jArrayImageURIs.add(jObjImage.getString(URI)); } } } }else { //add 'featureObject' 
jSimImages.add(addFeatureObject(source,matchedImageCacheURI)); } } else { JSONArray jNewSimImages = new JSONArray(); jNewSimImages.add(accumulateSimilarImageFeature(queryURI)); jNewSimImages.add(addFeatureObject(source,matchedImageCacheURI)); jHFC.accumulate(SIMILAR_IMAGES, jNewSimImages); } } return source; } public static JSONObject addFeatureObject(JSONObject source,String matchedImageCacheURI){ Object objImagePart = source.get(HAS_IMAGE_PART); JSONObject jObjReturn = new JSONObject(); if(objImagePart instanceof JSONArray) { //JSONArray jArrayImagePart = source.getJSONArray(HAS_IMAGE_PART);//guaranteed to be in there JSONArray jArrayImagePart = (JSONArray) objImagePart;//guaranteed to be in there for(int i=0;i<jArrayImagePart.size();i++){ JSONObject jObjImage = jArrayImagePart.getJSONObject(i); if(jObjImage.getString(CACHE_URL).equals(matchedImageCacheURI)){ JSONArray jArrayImageURIs = new JSONArray(); jArrayImageURIs.add(jObjImage.getString(URI)); JSONObject jObjFeatureObject = new JSONObject(); jObjFeatureObject.accumulate(IMAGE_OBJECT_URIS, jArrayImageURIs); jObjReturn.accumulate(FEATURE_OBJECT, jObjFeatureObject); break; } } } else if(objImagePart instanceof JSONObject) { JSONObject jObjImagePart = (JSONObject) objImagePart; if(jObjImagePart.getString(CACHE_URL).equals(matchedImageCacheURI)){ JSONArray jArrayImageURIs = new JSONArray(); jArrayImageURIs.add(jObjImagePart.getString(URI)); JSONObject jObjFeatureObject = new JSONObject(); jObjFeatureObject.accumulate(IMAGE_OBJECT_URIS, jArrayImageURIs); jObjReturn.accumulate(FEATURE_OBJECT, jObjFeatureObject); } } return jObjReturn; } public static JSONObject accumulateSimilarImageFeature(String queryURI){ JSONObject jNewSimImage = new JSONObject(); jNewSimImage.accumulate(FEATURE_NAME_LABEL, FEATURE_NAME); jNewSimImage.accumulate(FEATURE_NAME, queryURI); jNewSimImage.accumulate(FEATURE_VALUE_LABEL, queryURI); return jNewSimImage; } public static JSONObject UpdateWebPagesWithSimilarImages(JSONArray jArray,String 
queryURI,String differentIndex) throws Exception{ try{ Initialize(); JSONObject jResults = new JSONObject(); String indexToUse = null; if(differentIndex != null){ indexToUse = differentIndex; }else { indexToUse = indexName; } for(int i=0;i<jArray.size();i++){ TermQueryBuilder termQB = QueryBuilders.termQuery(IMAGE_CACHE_URL, jArray.get(i)); SearchResponse searchResp = esClient.prepareSearch(indexToUse) .setTypes(docType) .setQuery(termQB) .execute() .actionGet(); SearchHit[] searchHit = searchResp.getHits().getHits(); for(SearchHit hit : searchHit){ // LOG.debug("Ads id: "+ hit.getId()); String docId = hit.getId(); JSONObject jUpdatedSource = addSimilarImagesFeature((JSONObject) JSONSerializer.toJSON(hit.getSourceAsString()), queryURI, jArray.get(i).toString()); UpdateRequest updateRequest = new UpdateRequest(); updateRequest.index(indexToUse); updateRequest.type(docType); updateRequest.id(docId); updateRequest.doc(jUpdatedSource); esClient.update(updateRequest).get(); jResults.accumulate("ad_uri", "http://" + elasticsearchHost + ":" + returnPort + "/" + indexToUse + "/" + docType + "/" + docId); } } return jResults; }catch(Exception e){ throw e; } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } }
ds/src/main/java/edu/isi/dig/elasticsearch/ElasticSearchHandler.java
package edu.isi.dig.elasticsearch; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.Set; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import net.sf.json.JSONSerializer; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.SearchHit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ElasticSearchHandler { /*final static String SEARCH_RESULTS="results"; final static String CLUSTER_NAME = "cluster.name"; final static String CLUSTER_NAME_VALUE = "dig_isi"; final static String ELASTICSEARCH_HOST = "localhost"; final static int ELASTICSEARCH_PORT = 9300;*/ final static String BODY_PART="hasBodyPart"; final static String TEXT = "text"; final static String fileName = "config.properties"; final static String IMAGE_CACHE_URL = "hasImagePart.cacheUrl"; final static String SIMILAR_IMAGES = "similar_images_feature"; final static String FEATURE_VALUE_LABEL = "featureValue"; final static String FEATURE_NAME_LABEL = "featureName"; final static String FEATURE_NAME = "similarimageurl"; final static String HAS_FEATURE_COLLECTION = "hasFeatureCollection"; final static String HAS_IMAGE_PART = "hasImagePart"; final static String URI = "uri"; final static String FEATURE_OBJECT = "featureObject"; final static String IMAGE_OBJECT_URIS = "imageObjectUris"; final static String CACHE_URL = "cacheUrl"; 
static Client esClient=null; static TransportClient ts =null; static SearchResponse searchResp = null; static Properties prop=null; static String indexName=""; static String docType=""; static String environment=""; static String returnPort = "9200"; static String elasticsearchHost=""; static Settings settings = null; private static Logger LOG = LoggerFactory.getLogger(ElasticSearchHandler.class); public static void Initialize(){ prop = new Properties(); InputStream input = ElasticSearchHandler.class.getClassLoader().getResourceAsStream(fileName); try{ prop.load(input); elasticsearchHost=prop.getProperty("elasticsearchHost"); settings = ImmutableSettings.settingsBuilder() .put(prop.getProperty("clusterNameProperty"), prop.getProperty("clusterName")).build(); ts = new TransportClient(settings); esClient = ts.addTransportAddress(new InetSocketTransportAddress(elasticsearchHost, Integer.parseInt(prop.getProperty("elasticsearchPort")))); indexName = prop.getProperty("indexName"); docType = prop.getProperty("docType"); environment = prop.getProperty("environment"); if(environment.equals("production")){ returnPort = prop.getProperty("nginxPort"); } }catch(IOException ioe){ ioe.printStackTrace(); } } public static String PerformSimpleSearch(String uri){ try{ Initialize(); TermQueryBuilder termQB = QueryBuilders.termQuery(URI, uri); SearchResponse searchResp = esClient.prepareSearch(indexName) .setTypes(docType) .setQuery(termQB) .execute() .actionGet(); SearchHit[] searchHit = searchResp.getHits().getHits(); if(searchHit.length == 1){ return searchHit[0].getSourceAsString(); } return null; }catch(Exception e){ return e.toString(); } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } private static Map<String,Object> collectFeatures(String jsonWebPage){ //AS per our current standards, should be a JSON object JSONObject jSource = (JSONObject) JSONSerializer.toJSON(jsonWebPage); Map<String, Object> mapFeatureCollection = new HashMap<String,Object>(); 
//LOG.debug(jsonWebPage); if(jSource.containsKey("hasFeatureCollection")){ JSONObject jFeatureCollection = jSource.getJSONObject("hasFeatureCollection"); @SuppressWarnings("unchecked") Set<String> keys = jFeatureCollection.keySet(); for(String key : keys){ Object jFeature = jFeatureCollection.get(key); if(jFeature instanceof JSONObject){ if(((JSONObject) jFeature).containsKey("featureName") && ((JSONObject) jFeature).containsKey("featureValue")){ mapFeatureCollection.put(((JSONObject) jFeature).getString("featureName"), ((JSONObject) jFeature).getString("featureValue")); } }else if(jFeature instanceof JSONArray){ JSONArray JFeatures = (JSONArray) jFeature; //if its an array, it has multiple values for the feature, ArrayList<String> multipleValues = new ArrayList<String>(); if(JFeatures.size() > 0){ for(int i=0; i < JFeatures.size();i++){ JSONObject jTemp = JFeatures.getJSONObject(i); if(((JSONObject) jTemp).containsKey("featureName") && ((JSONObject) jTemp).containsKey("featureValue")){ multipleValues.add(((JSONObject) jTemp).getString("featureValue")); } } //get the featureName,multipleValues map mapFeatureCollection.put(JFeatures.getJSONObject(0).getString("featureName"), multipleValues); } } } } return mapFeatureCollection; } public static String FindSimilar(String uri,String sendBack){ try{ String searchSourceJson = PerformSimpleSearch(uri); Map<String,Object> mapSourceFeatures = new HashMap<String, Object>(); if(searchSourceJson!=null){ JSONObject jSourceObj = (JSONObject) JSONSerializer.toJSON(searchSourceJson); if(jSourceObj.containsKey(BODY_PART)){ JSONObject jBodyPart = (JSONObject) jSourceObj.get(BODY_PART); if(jBodyPart.containsKey(TEXT)){ String bodyText = jBodyPart.getString(TEXT); //Create a map of <featureValues,featureNames> mapSourceFeatures = collectFeatures(searchSourceJson); //LOG.debug(bodyText); //ToDo: Make this query better MoreLikeThisQueryBuilder qb = QueryBuilders.moreLikeThisQuery("hasBodyPart.text.shingle_4") .likeText(bodyText) 
.minTermFreq(1) .maxQueryTerms(20); Initialize(); searchResp = esClient.prepareSearch(indexName) .setTypes(docType) .setQuery(qb) .execute() .actionGet(); SearchHit[] searchHit = searchResp.getHits().getHits(); JSONObject jParentObj= new JSONObject(); JSONArray jArray = new JSONArray(); for(SearchHit sh : searchHit){ JSONArray additionalFeatures = new JSONArray(); JSONArray missingFeatures = new JSONArray(); JSONArray differentValuedFeatures = new JSONArray(); JSONObject similarWebPageResult = new JSONObject(); JSONObject jSimilarWebPage = (JSONObject)JSONSerializer.toJSON(sh.getSourceAsString()); Map<String,Object> mapSimilarWPFC = collectFeatures(sh.getSourceAsString()); Set<String> similarWebPagekeys =mapSimilarWPFC.keySet(); for(String key : similarWebPagekeys){ if(mapSourceFeatures.containsKey(key)){ Object sourceFeatureValue = mapSourceFeatures.get(key); Object targetFeatureValue = mapSimilarWPFC.get(key); if(sourceFeatureValue instanceof String && targetFeatureValue instanceof String){ if(!(String.valueOf(sourceFeatureValue).equals(String.valueOf(targetFeatureValue)))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, String.valueOf(targetFeatureValue)); //differentValuedFeatures.add(jTemp); differentValuedFeatures.add(jTemp.toString()); } } else if(sourceFeatureValue instanceof ArrayList<?> && targetFeatureValue instanceof ArrayList<?>){ for(int i=0;i < ((ArrayList<?>) targetFeatureValue).size();i++){ if(!((ArrayList<?>) sourceFeatureValue).contains(((ArrayList<?>) targetFeatureValue).get(i))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, ((ArrayList<?>) targetFeatureValue).get(i)); //differentValuedFeatures.add(jTemp); differentValuedFeatures.add(jTemp.toString()); } } } else{//one is string and other is ArrayList if(sourceFeatureValue instanceof String){//source string, target ArrayList for(int i=0;i < ((ArrayList<?>) targetFeatureValue).size();i++){ if(!((ArrayList<?>) 
targetFeatureValue).get(i).equals(String.valueOf(sourceFeatureValue))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, ((ArrayList<?>) targetFeatureValue).get(i)); //differentValuedFeatures.add(jTemp); differentValuedFeatures.add(jTemp.toString()); } } }else{//Source - ArrayList , target - String if(!((ArrayList<?>) sourceFeatureValue).contains(String.valueOf(targetFeatureValue))){ JSONObject jTemp = new JSONObject(); jTemp.accumulate(key, String.valueOf(targetFeatureValue)); //differentValuedFeatures.add(jTemp); differentValuedFeatures.add(jTemp.toString()); } } } } else{ //additional features, not present in the source json but in similar json objects JSONObject jTemp = new JSONObject(); Object value = mapSimilarWPFC.get(key); if(value instanceof ArrayList<?>){ for(int i=0;i< ((ArrayList<?>) value).size();i++){ jTemp.accumulate(key, ((ArrayList<?>) value).get(i)); } } else{ jTemp.accumulate(key, value); } //additionalFeatures.add(jTemp); additionalFeatures.add(jTemp.toString()); } } Set<String> sourceKeys = mapSourceFeatures.keySet(); for(String key: sourceKeys){ if(!mapSimilarWPFC.containsKey(key)){ JSONObject jTemp = new JSONObject(); Object value = mapSourceFeatures.get(key); if(value instanceof ArrayList<?>){ for(int i=0;i< ((ArrayList<?>) value).size();i++){ jTemp.accumulate(key, ((ArrayList<?>) value).get(i)); } } else{ jTemp.accumulate(key, value); } //missingFeatures.add(jTemp); missingFeatures.add(jTemp.toString()); } } String resultURI = jSimilarWebPage.getString(URI); if(sendBack.equalsIgnoreCase("all")){ similarWebPageResult.accumulate("similarWebPage",jSimilarWebPage); } else{ JSONObject jSimilarBodyPart = jSimilarWebPage.getJSONObject(BODY_PART); similarWebPageResult.accumulate(URI,resultURI); similarWebPageResult.accumulate(BODY_PART + "."+ TEXT, jSimilarBodyPart.getString(TEXT)); } if(additionalFeatures.size() > 0){ similarWebPageResult.accumulate("additionalFeatures", additionalFeatures); } if(differentValuedFeatures.size()>0){ 
similarWebPageResult.accumulate("featuresWithDifferentValues",differentValuedFeatures); } if(missingFeatures.size() > 0){ similarWebPageResult.accumulate("missingFeatures",missingFeatures); } jArray.add(new JSONObject().accumulate("similarWebPage",similarWebPageResult)); } jParentObj.accumulate("similar", jArray); return jParentObj.toString(); } else{ throw new Exception("hasBodyPart for URI: " + uri + " doesnot contain field 'text'"); } } else{ throw new Exception("Json Object for URI: " + uri + " doesnot contain 'hasBodyPart'"); } } else{ throw new Exception("No WebPage found for uri: " + uri); } }catch(Exception e){ return e.toString(); } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } public static JSONObject addSimilarImagesFeature(JSONObject source, String queryURI,String matchedImageCacheURI) { if(source.containsKey(HAS_FEATURE_COLLECTION)){ JSONObject jHFC = source.getJSONObject("hasFeatureCollection");//Assumption: it is a json object. I would be surprised if it isnt boolean containsFeatureObject = false; JSONObject jObjFeatureObject = new JSONObject(); if(jHFC.containsKey(SIMILAR_IMAGES)){ JSONArray jSimImages = jHFC.getJSONArray(SIMILAR_IMAGES); boolean containsURI = false; for(int i=0;i<jSimImages.size();i++){ if(jSimImages.getJSONObject(i).containsKey(FEATURE_OBJECT)){ containsFeatureObject = true; jObjFeatureObject = jSimImages.getJSONObject(i).getJSONObject(FEATURE_OBJECT); } else { JSONObject jSimImage = jSimImages.getJSONObject(i); if(jSimImage.getString(FEATURE_VALUE_LABEL).equals(queryURI)){ containsURI = true; } } } if(!containsURI){ jSimImages.add(accumulateSimilarImageFeature(queryURI)); } if(containsFeatureObject){ //check if the uri being added is not already in there JSONArray jArrayImageURIs = jObjFeatureObject.getJSONArray(IMAGE_OBJECT_URIS); JSONArray jArrayImagePart = source.getJSONArray(HAS_IMAGE_PART); for(int j=0;j<jArrayImagePart.size();j++){ JSONObject jObjImage = jArrayImagePart.getJSONObject(j); 
if(jObjImage.getString(CACHE_URL).equals(matchedImageCacheURI)){ if(!jArrayImageURIs.contains(jObjImage.getString(URI))){ jArrayImageURIs.add(jObjImage.getString(URI)); } } } }else { //add 'featureObject' jSimImages.add(addFeatureObject(source,matchedImageCacheURI)); } } else { JSONArray jNewSimImages = new JSONArray(); jNewSimImages.add(accumulateSimilarImageFeature(queryURI)); jNewSimImages.add(addFeatureObject(source,matchedImageCacheURI)); jHFC.accumulate(SIMILAR_IMAGES, jNewSimImages); } } return source; } public static JSONObject addFeatureObject(JSONObject source,String matchedImageCacheURI){ JSONArray jArrayImagePart = source.getJSONArray(HAS_IMAGE_PART);//guaranteed to be in there JSONObject jObjReturn = new JSONObject(); for(int i=0;i<jArrayImagePart.size();i++){ JSONObject jObjImage = jArrayImagePart.getJSONObject(i); if(jObjImage.getString(CACHE_URL).equals(matchedImageCacheURI)){ JSONArray jArrayImageURIs = new JSONArray(); jArrayImageURIs.add(jObjImage.getString(URI)); JSONObject jObjFeatureObject = new JSONObject(); jObjFeatureObject.accumulate(IMAGE_OBJECT_URIS, jArrayImageURIs); jObjReturn.accumulate(FEATURE_OBJECT, jObjFeatureObject); break; } } return jObjReturn; } public static JSONObject accumulateSimilarImageFeature(String queryURI){ JSONObject jNewSimImage = new JSONObject(); jNewSimImage.accumulate(FEATURE_NAME_LABEL, FEATURE_NAME); jNewSimImage.accumulate(FEATURE_NAME, queryURI); jNewSimImage.accumulate(FEATURE_VALUE_LABEL, queryURI); return jNewSimImage; } public static JSONObject UpdateWebPagesWithSimilarImages(JSONArray jArray,String queryURI,String differentIndex) throws Exception{ try{ Initialize(); JSONObject jResults = new JSONObject(); String indexToUse = null; if(differentIndex != null){ indexToUse = differentIndex; }else { indexToUse = indexName; } for(int i=0;i<jArray.size();i++){ TermQueryBuilder termQB = QueryBuilders.termQuery(IMAGE_CACHE_URL, jArray.get(i)); SearchResponse searchResp = esClient.prepareSearch(indexToUse) 
.setTypes(docType) .setQuery(termQB) .execute() .actionGet(); SearchHit[] searchHit = searchResp.getHits().getHits(); for(SearchHit hit : searchHit){ LOG.debug("Ads id: "+ hit.getId()); String docId = hit.getId(); JSONObject jUpdatedSource = addSimilarImagesFeature((JSONObject) JSONSerializer.toJSON(hit.getSourceAsString()), queryURI, jArray.get(i).toString()); UpdateRequest updateRequest = new UpdateRequest(); updateRequest.index(indexToUse); updateRequest.type(docType); updateRequest.id(docId); updateRequest.doc(jUpdatedSource); esClient.update(updateRequest).get(); jResults.accumulate("ad_uri", "http://" + elasticsearchHost + ":" + returnPort + "/" + indexToUse + "/" + docType + "/" + docId); } } return jResults; }catch(Exception e){ throw e; } finally{ if(ts!=null) ts.close(); if(esClient!=null) esClient.close(); } } }
check for the case where there is only one image in the hasImagePart object
ds/src/main/java/edu/isi/dig/elasticsearch/ElasticSearchHandler.java
check for the case where there is only one image in the hasImagePart object
Java
apache-2.0
e09404181f64f8638a0dd0dab5daea8b3abab83a
0
allotria/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,youdonghai/intellij-community,allotria/intellij-community,allotria/intellij-community,semonte/intellij-community,hurricup/intellij-community,apixandru/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,retomerz/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,signed/intellij-community,semonte/intellij-community,semonte/intellij-community,ibinti/intellij-community,allotria/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,semonte/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,hurricup/intellij-community,hurricup/intellij-community,signed/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,api
xandru/intellij-community,da1z/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,apixandru/intellij-community,semonte/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,hurricup/intellij-community,apixandru/intellij-community,fitermay/intellij-community,hurricup/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,apixandru/intellij-community,allotria/intellij-community,asedunov/intellij-community,semonte/intellij-community,signed/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,signed/intellij-community,fitermay/intellij-community,xfournet/intellij-community,hurricup/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,youdonghai/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,xfournet/intellij-community,retomerz/intellij-community,fitermay/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,ibinti/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-communi
ty,allotria/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,da1z/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,signed/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,lucafavatella/intellij-community,signed/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,apixandru/intellij-community,da1z/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,semonte/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,semonte/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,da1z/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,fitermay/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,retomerz/intellij-community,apixandru/intellij-community,apixandru/intellij-community,michaelgallac
her/intellij-community,asedunov/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,da1z/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,xfournet/intellij-community,fitermay/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,signed/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,fitermay/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,da1z/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ibinti/intellij-community
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui.components; import com.intellij.openapi.ui.TypingTarget; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.registry.Registry; import com.intellij.ui.components.JBScrollPane.Alignment; import com.intellij.ui.table.JBTable; import com.intellij.util.ui.ComponentWithEmptyText; import com.intellij.util.ui.StatusText; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.AbstractBorder; import javax.swing.border.Border; import javax.swing.plaf.TreeUI; import javax.swing.plaf.UIResource; import javax.swing.plaf.basic.BasicTreeUI; import java.awt.*; import java.awt.event.ContainerEvent; import java.awt.event.ContainerListener; public class JBViewport extends JViewport implements ZoomableViewport { private static final ViewportLayout ourLayoutManager = new ViewportLayout() { @Override public void layoutContainer(Container parent) { if (parent instanceof JViewport && Registry.is("ide.scroll.new.layout")) { JViewport viewport = (JViewport)parent; Component view = viewport.getView(); if (view != null) { Container grand = viewport.getParent(); if (grand instanceof JScrollPane) { doLayout((JScrollPane)grand, viewport, view); } else { super.layoutContainer(parent); } } return; } JBViewport viewport = (JBViewport)parent; Component view = viewport.getView(); JBScrollPane scrollPane = 
UIUtil.getParentOfType(JBScrollPane.class, parent); // do not force viewport size on editor component, e.g. EditorTextField and LanguageConsole if (view == null || scrollPane == null || view instanceof TypingTarget) { super.layoutContainer(parent); return; } Dimension size = doSuperLayoutContainer(viewport); Dimension visible = viewport.getExtentSize(); if (scrollPane.getHorizontalScrollBarPolicy() == ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER) { size.width = visible.width; } if (scrollPane.getVerticalScrollBarPolicy() == ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER) { size.height = visible.height; } viewport.setViewSize(size); } private Dimension doSuperLayoutContainer(JBViewport viewport) { try { viewport.mySaveTempViewSize = true; super.layoutContainer(viewport); } finally { viewport.mySaveTempViewSize = false; } return viewport.myTempViewSize; } }; private StatusText myEmptyText; private boolean myPaintingNow; private ZoomingDelegate myZoomer; private Dimension myTempViewSize; private boolean mySaveTempViewSize; private volatile boolean myBackgroundRequested; // avoid cyclic references public JBViewport() { addContainerListener(new ContainerListener() { @Override public void componentAdded(ContainerEvent e) { Component child = e.getChild(); if (child instanceof JBTable) { myEmptyText = ((ComponentWithEmptyText)child).getEmptyText(); myEmptyText.attachTo(JBViewport.this, child); } } @Override public void componentRemoved(ContainerEvent e) { Component child = e.getChild(); if (child instanceof JBTable) { ((ComponentWithEmptyText)child).getEmptyText().attachTo(child); myEmptyText = null; } } }); } @Override public Color getBackground() { Color color = super.getBackground(); if (!myBackgroundRequested && EventQueue.isDispatchThread() && Registry.is("ide.scroll.background.auto")) { if (!isBackgroundSet() || color instanceof UIResource) { Component child = getView(); if (child != null) { try { myBackgroundRequested = true; return child.getBackground(); } finally 
{ myBackgroundRequested = false; } } } } return color; } @Override protected LayoutManager createLayoutManager() { return ourLayoutManager; } @Override public void setViewSize(Dimension newSize) { // only store newSize from ViewportLayout.layoutContainer // if we're going to fix it the next moment in our layoutContainer code if (mySaveTempViewSize) { myTempViewSize = newSize; } else { super.setViewSize(newSize); } } @Override public void paint(Graphics g) { myPaintingNow = true; if (myZoomer != null && myZoomer.isActive()) { myZoomer.paint(g); } else { super.paint(g); if (myEmptyText != null) { myEmptyText.paint(this, g); } } myPaintingNow = false; } @Nullable @Override public Magnificator getMagnificator() { return UIUtil.getClientProperty(getView(), Magnificator.CLIENT_PROPERTY_KEY); } @Override public void magnificationStarted(Point at) { myZoomer = new ZoomingDelegate((JComponent)getView(), this); myZoomer.magnificationStarted(at); } @Override public void magnificationFinished(double magnification) { myZoomer.magnificationFinished(magnification); myZoomer = null; } @Override public void magnify(double magnification) { myZoomer.magnify(magnification); } public boolean isPaintingNow() { return myPaintingNow; } /** * Returns the alignment of the specified scroll bar * if and only if the specified scroll bar * is located over the main viewport. 
* * @param bar the scroll bar to process * @return the scroll bar alignment or {@code null} */ private static Alignment getAlignment(JScrollBar bar) { if (bar != null && bar.isVisible() && !bar.isOpaque()) { return UIUtil.getClientProperty(bar, Alignment.class); } return null; } private static void doLayout(JScrollPane pane, JViewport viewport, Component view) { updateBorder(view); Dimension actualSize = viewport.getSize(); Dimension extentSize = viewport.toViewCoordinates(actualSize); Dimension viewPreferredSize = view.getPreferredSize(); Dimension viewSize = new Dimension(viewPreferredSize); Point viewPosition = viewport.getViewPosition(); Scrollable scrollable = null; if (view instanceof Scrollable) { scrollable = (Scrollable)view; if (scrollable.getScrollableTracksViewportWidth()) viewSize.width = actualSize.width; if (scrollable.getScrollableTracksViewportHeight()) viewSize.height = actualSize.height; } // If the new viewport size would leave empty space to the right of the view, // right justify the view or left justify the view // when the width of the view is smaller than the container. int maxX = viewSize.width - extentSize.width; if (scrollable == null || pane.getComponentOrientation().isLeftToRight()) { if (viewPosition.x > maxX) { viewPosition.x = Math.max(0, maxX); } } else { viewPosition.x = maxX < 0 ? maxX : Math.max(0, Math.min(maxX, viewPosition.x)); } // If the new viewport size would leave empty space below the view, // bottom justify the view or top justify the view // when the height of the view is smaller than the container. int maxY = viewSize.height - extentSize.height; if (viewPosition.y > maxY) { viewPosition.y = Math.max(0, maxY); } // If we haven't been advised about how the viewports size should change wrt to the viewport, // i.e. if the view isn't an instance of Scrollable, then adjust the views size as follows. 
if (scrollable == null) { // If the origin of the view is showing and the viewport is bigger than the views preferred size, // then make the view the same size as the viewport. if (viewPosition.x == 0 && actualSize.width > viewPreferredSize.width) viewSize.width = actualSize.width; if (viewPosition.y == 0 && actualSize.height > viewPreferredSize.height) viewSize.height = actualSize.height; } // do not force viewport size on editor component, e.g. EditorTextField and LanguageConsole if (!(view instanceof TypingTarget)) { if (ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER == pane.getHorizontalScrollBarPolicy()) { viewPosition.x = 0; viewSize.width = extentSize.width; } if (ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER == pane.getVerticalScrollBarPolicy()) { viewPosition.y = 0; viewSize.height = extentSize.height; } } viewport.setViewPosition(viewPosition); viewport.setViewSize(viewSize); } private static void updateBorder(Component view) { if (view instanceof JTable) return; // tables are not supported yet if (view instanceof JComponent) { JComponent component = (JComponent)view; Border border = component.getBorder(); if (border instanceof ViewBorder) return; // already set component.setBorder(border == null || border instanceof UIResource ? new ResourceViewBorder(border) : new ViewBorder(border)); } } /** * This border is used to add additional space for a view * and can be changed on UI update. */ private static class ResourceViewBorder extends ViewBorder implements UIResource { ResourceViewBorder(Border border) { super(border); } } /** * This border is used to add additional space for a view. 
*/ private static class ViewBorder extends AbstractBorder { private final Insets myInsets = new Insets(0, 0, 0, 0); private final Border myBorder; ViewBorder(Border border) { myBorder = border; } @Override public Insets getBorderInsets(Component view, Insets insets) { if (insets == null) { insets = new Insets(0, 0, 0, 0); } else { insets.set(0, 0, 0, 0); } if (myBorder != null) { Insets inner = myBorder.getBorderInsets(view); if (inner != null) insets.set(inner.top, inner.left, inner.bottom, inner.right); } if (view instanceof JComponent) { addViewInsets((JComponent)view, insets); } if (!myInsets.equals(insets)) { myInsets.set(insets.top, insets.left, insets.bottom, insets.right); if (view instanceof JComponent) { JComponent component = (JComponent)view; if (component instanceof JTree) { // invalidate cached preferred size JTree tree = (JTree)component; TreeUI ui = tree.getUI(); if (ui instanceof BasicTreeUI) { BasicTreeUI basic = (BasicTreeUI)ui; basic.setLeftChildIndent(basic.getLeftChildIndent()); } } component.revalidate(); } else { view.invalidate(); view.repaint(); } } return insets; } @Override public void paintBorder(Component view, Graphics g, int x, int y, int width, int height) { if (myBorder != null) { // additional insets are used inside a custom border myBorder.paintBorder(view, g, x, y, width, height); } } private void addViewInsets(JComponent view, Insets insets) { if (this == view.getBorder()) { Container parent = view.getParent(); if (parent instanceof JViewport) { JViewport viewport = (JViewport)parent; Container grand = viewport.getParent(); if (grand instanceof JScrollPane) { JScrollPane pane = (JScrollPane)grand; // calculate empty border under vertical scroll bar if (viewport == pane.getViewport() || viewport == pane.getColumnHeader()) { JScrollBar vsb = pane.getVerticalScrollBar(); Alignment va = getAlignment(vsb); if (va == Alignment.LEFT) { insets.left += vsb.getWidth(); } else if (va == Alignment.RIGHT && isAlignmentNeeded(view)) { 
insets.right += vsb.getWidth(); } } // calculate empty border under horizontal scroll bar if (viewport == pane.getViewport() || viewport == pane.getRowHeader()) { JScrollBar hsb = pane.getHorizontalScrollBar(); Alignment ha = getAlignment(hsb); if (ha == Alignment.TOP) { insets.top += hsb.getHeight(); } else if (ha == Alignment.BOTTOM && isAlignmentNeeded(view)) { insets.bottom += hsb.getHeight(); } } } } } } private boolean isAlignmentNeeded(JComponent view) { return !SystemInfo.isMac && (view instanceof JList || view instanceof JTree || Registry.is("ide.scroll.align.component")); } } }
platform/platform-api/src/com/intellij/ui/components/JBViewport.java
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui.components; import com.intellij.openapi.ui.TypingTarget; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.registry.Registry; import com.intellij.ui.components.JBScrollPane.Alignment; import com.intellij.ui.table.JBTable; import com.intellij.util.ui.ComponentWithEmptyText; import com.intellij.util.ui.StatusText; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.AbstractBorder; import javax.swing.border.Border; import javax.swing.plaf.TreeUI; import javax.swing.plaf.UIResource; import javax.swing.plaf.basic.BasicTreeUI; import java.awt.*; import java.awt.event.ContainerEvent; import java.awt.event.ContainerListener; public class JBViewport extends JViewport implements ZoomableViewport { private static final ViewportLayout ourLayoutManager = new ViewportLayout() { @Override public void layoutContainer(Container parent) { if (parent instanceof JViewport && Registry.is("ide.scroll.new.layout")) { JViewport viewport = (JViewport)parent; Component view = viewport.getView(); if (view != null) { Container grand = viewport.getParent(); if (grand instanceof JScrollPane) { doLayout((JScrollPane)grand, viewport, view); } else { super.layoutContainer(parent); } } return; } JBViewport viewport = (JBViewport)parent; Component view = viewport.getView(); JBScrollPane scrollPane = 
UIUtil.getParentOfType(JBScrollPane.class, parent); // do not force viewport size on editor component, e.g. EditorTextField and LanguageConsole if (view == null || scrollPane == null || view instanceof TypingTarget) { super.layoutContainer(parent); return; } Dimension size = doSuperLayoutContainer(viewport); Dimension visible = viewport.getExtentSize(); if (scrollPane.getHorizontalScrollBarPolicy() == ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER) { size.width = visible.width; } if (scrollPane.getVerticalScrollBarPolicy() == ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER) { size.height = visible.height; } viewport.setViewSize(size); } private Dimension doSuperLayoutContainer(JBViewport viewport) { try { viewport.mySaveTempViewSize = true; super.layoutContainer(viewport); } finally { viewport.mySaveTempViewSize = false; } return viewport.myTempViewSize; } }; private StatusText myEmptyText; private boolean myPaintingNow; private ZoomingDelegate myZoomer; private Dimension myTempViewSize; private boolean mySaveTempViewSize; private volatile boolean myBackgroundRequested; // avoid cyclic references public JBViewport() { addContainerListener(new ContainerListener() { @Override public void componentAdded(ContainerEvent e) { Component child = e.getChild(); if (child instanceof JBTable) { myEmptyText = ((ComponentWithEmptyText)child).getEmptyText(); myEmptyText.attachTo(JBViewport.this, child); } } @Override public void componentRemoved(ContainerEvent e) { Component child = e.getChild(); if (child instanceof JBTable) { ((ComponentWithEmptyText)child).getEmptyText().attachTo(child); myEmptyText = null; } } }); } @Override public Color getBackground() { Color color = super.getBackground(); if (!myBackgroundRequested && EventQueue.isDispatchThread() && Registry.is("ide.scroll.background.auto")) { if (!isBackgroundSet() || color instanceof UIResource) { Component child = getView(); if (child != null) { try { myBackgroundRequested = true; return child.getBackground(); } finally 
{ myBackgroundRequested = false; } } } } return color; } @Override protected LayoutManager createLayoutManager() { return ourLayoutManager; } @Override public void setViewSize(Dimension newSize) { // only store newSize from ViewportLayout.layoutContainer // if we're going to fix it the next moment in our layoutContainer code if (mySaveTempViewSize) { myTempViewSize = newSize; } else { super.setViewSize(newSize); } } @Override public void paint(Graphics g) { myPaintingNow = true; if (myZoomer != null && myZoomer.isActive()) { myZoomer.paint(g); } else { super.paint(g); if (myEmptyText != null) { myEmptyText.paint(this, g); } } myPaintingNow = false; } @Nullable @Override public Magnificator getMagnificator() { return UIUtil.getClientProperty(getView(), Magnificator.CLIENT_PROPERTY_KEY); } @Override public void magnificationStarted(Point at) { myZoomer = new ZoomingDelegate((JComponent)getView(), this); myZoomer.magnificationStarted(at); } @Override public void magnificationFinished(double magnification) { myZoomer.magnificationFinished(magnification); myZoomer = null; } @Override public void magnify(double magnification) { myZoomer.magnify(magnification); } public boolean isPaintingNow() { return myPaintingNow; } /** * Returns the alignment of the specified scroll bar * if and only if the specified scroll bar * is located over the main viewport. 
* * @param bar the scroll bar to process * @return the scroll bar alignment or {@code null} */ private static Alignment getAlignment(JScrollBar bar) { if (bar != null && bar.isVisible() && !bar.isOpaque()) { return UIUtil.getClientProperty(bar, Alignment.class); } return null; } private static void doLayout(JScrollPane pane, JViewport viewport, Component view) { updateBorder(view); Dimension actualSize = viewport.getSize(); Dimension extentSize = viewport.toViewCoordinates(actualSize); Dimension viewPreferredSize = view.getPreferredSize(); Dimension viewSize = new Dimension(viewPreferredSize); Point viewPosition = viewport.getViewPosition(); Scrollable scrollable = null; if (view instanceof Scrollable) { scrollable = (Scrollable)view; if (scrollable.getScrollableTracksViewportWidth()) viewSize.width = actualSize.width; if (scrollable.getScrollableTracksViewportHeight()) viewSize.height = actualSize.height; } // If the new viewport size would leave empty space to the right of the view, // right justify the view or left justify the view // when the width of the view is smaller than the container. int maxX = viewSize.width - extentSize.width; if (scrollable == null || pane.getComponentOrientation().isLeftToRight()) { if (viewPosition.x > maxX) { viewPosition.x = Math.max(0, maxX); } } else { viewPosition.x = maxX < 0 ? maxX : Math.max(0, Math.min(maxX, viewPosition.x)); } // If the new viewport size would leave empty space below the view, // bottom justify the view or top justify the view // when the height of the view is smaller than the container. int maxY = viewSize.height - extentSize.height; if (viewPosition.y > maxY) { viewPosition.y = Math.max(0, maxY); } // If we haven't been advised about how the viewports size should change wrt to the viewport, // i.e. if the view isn't an instance of Scrollable, then adjust the views size as follows. 
if (scrollable == null) { // If the origin of the view is showing and the viewport is bigger than the views preferred size, // then make the view the same size as the viewport. if (viewPosition.x == 0 && actualSize.width > viewPreferredSize.width) viewSize.width = actualSize.width; if (viewPosition.y == 0 && actualSize.height > viewPreferredSize.height) viewSize.height = actualSize.height; } // do not force viewport size on editor component, e.g. EditorTextField and LanguageConsole if (!(view instanceof TypingTarget)) { if (ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER == pane.getHorizontalScrollBarPolicy()) { viewPosition.x = 0; viewSize.width = extentSize.width; } if (ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER == pane.getVerticalScrollBarPolicy()) { viewPosition.y = 0; viewSize.height = extentSize.height; } } viewport.setViewPosition(viewPosition); viewport.setViewSize(viewSize); } private static void updateBorder(Component view) { if (view instanceof JTable) return; // tables are not supported yet if (view instanceof JList || view instanceof JTree || view instanceof JComponent && Registry.is("ide.scroll.align.component")) { JComponent component = (JComponent)view; Border border = component.getBorder(); if (border instanceof ViewBorder) return; // already set component.setBorder(border == null || border instanceof UIResource ? new ResourceViewBorder(border) : new ViewBorder(border)); } } /** * This border is used to add additional space for a view * and can be changed on UI update. */ private static class ResourceViewBorder extends ViewBorder implements UIResource { ResourceViewBorder(Border border) { super(border); } } /** * This border is used to add additional space for a view. 
*/ private static class ViewBorder extends AbstractBorder { private final Insets myInsets = new Insets(0, 0, 0, 0); private final Border myBorder; ViewBorder(Border border) { myBorder = border; } @Override public Insets getBorderInsets(Component view, Insets insets) { if (insets == null) { insets = new Insets(0, 0, 0, 0); } else { insets.set(0, 0, 0, 0); } if (myBorder != null) { Insets inner = myBorder.getBorderInsets(view); if (inner != null) insets.set(inner.top, inner.left, inner.bottom, inner.right); } if (view instanceof JComponent) { addViewInsets((JComponent)view, insets); } if (!myInsets.equals(insets)) { myInsets.set(insets.top, insets.left, insets.bottom, insets.right); if (view instanceof JComponent) { JComponent component = (JComponent)view; if (component instanceof JTree) { // invalidate cached preferred size JTree tree = (JTree)component; TreeUI ui = tree.getUI(); if (ui instanceof BasicTreeUI) { BasicTreeUI basic = (BasicTreeUI)ui; basic.setLeftChildIndent(basic.getLeftChildIndent()); } } component.revalidate(); } else { view.invalidate(); view.repaint(); } } return insets; } @Override public void paintBorder(Component view, Graphics g, int x, int y, int width, int height) { if (myBorder != null) { // additional insets are used inside a custom border myBorder.paintBorder(view, g, x, y, width, height); } } private void addViewInsets(JComponent view, Insets insets) { if (this == view.getBorder()) { Container parent = view.getParent(); if (parent instanceof JViewport) { JViewport viewport = (JViewport)parent; Container grand = viewport.getParent(); if (grand instanceof JScrollPane) { JScrollPane pane = (JScrollPane)grand; // calculate empty border under vertical scroll bar if (viewport == pane.getViewport() || viewport == pane.getColumnHeader()) { JScrollBar vsb = pane.getVerticalScrollBar(); Alignment va = getAlignment(vsb); if (va == Alignment.LEFT) { insets.left += vsb.getWidth(); } else if (va == Alignment.RIGHT && !SystemInfo.isMac) { insets.right 
+= vsb.getWidth(); } } // calculate empty border under horizontal scroll bar if (viewport == pane.getViewport() || viewport == pane.getRowHeader()) { JScrollBar hsb = pane.getHorizontalScrollBar(); Alignment ha = getAlignment(hsb); if (ha == Alignment.TOP) { insets.top += hsb.getHeight(); } else if (ha == Alignment.BOTTOM && !SystemInfo.isMac) { insets.bottom += hsb.getHeight(); } } } } } } } }
PY-18440 Python consoles with scrollbar constantly twitching on repaint (quickfix with diff support)
platform/platform-api/src/com/intellij/ui/components/JBViewport.java
PY-18440 Python consoles with scrollbar constantly twitching on repaint (quickfix with diff support)
Java
apache-2.0
095c10a12efd198fb526b1150c7fe6d1a050fc5c
0
3dcitydb/importer-exporter,3dcitydb/importer-exporter,3dcitydb/importer-exporter
/* * 3D City Database - The Open Source CityGML Database * http://www.3dcitydb.org/ * * Copyright 2013 - 2019 * Chair of Geoinformatics * Technical University of Munich, Germany * https://www.gis.bgu.tum.de/ * * The 3D City Database is jointly developed with the following * cooperation partners: * * virtualcitySYSTEMS GmbH, Berlin <http://www.virtualcitysystems.de/> * M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.citydb.cli; import org.citydb.citygml.exporter.CityGMLExportException; import org.citydb.citygml.exporter.controller.Exporter; import org.citydb.citygml.importer.CityGMLImportException; import org.citydb.citygml.importer.controller.Importer; import org.citydb.citygml.importer.controller.XMLValidator; import org.citydb.config.Config; import org.citydb.config.project.database.DBConnection; import org.citydb.config.project.database.DatabaseConfigurationException; import org.citydb.config.project.database.DatabaseSrs; import org.citydb.database.connection.DatabaseConnectionPool; import org.citydb.database.connection.DatabaseConnectionWarning; import org.citydb.database.schema.mapping.SchemaMapping; import org.citydb.database.version.DatabaseVersionException; import org.citydb.event.EventDispatcher; import org.citydb.log.Logger; import org.citydb.modules.kml.controller.KmlExportException; import org.citydb.modules.kml.controller.KmlExporter; import org.citydb.registry.ObjectRegistry; 
import org.citydb.util.Util; import org.citygml4j.builder.jaxb.CityGMLBuilder; import javax.xml.bind.JAXBContext; import java.io.File; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; public class ImpExpCli { private final Logger log = Logger.getInstance(); private final DatabaseConnectionPool dbPool; private final SchemaMapping schemaMapping; private CityGMLBuilder cityGMLBuilder; private JAXBContext jaxbKmlContext; private JAXBContext jaxbColladaContext; private Config config; public ImpExpCli(JAXBContext jaxbKmlContext, JAXBContext jaxbColladaContext, Config config) { this.jaxbKmlContext = jaxbKmlContext; this.jaxbColladaContext = jaxbColladaContext; this.config = config; dbPool = DatabaseConnectionPool.getInstance(); cityGMLBuilder = ObjectRegistry.getInstance().getCityGMLBuilder(); schemaMapping = ObjectRegistry.getInstance().getSchemaMapping(); } public void doImport(String importFiles) { // prepare list of files to be validated List<Path> files = getFiles(importFiles); if (files.size() == 0) { log.error("Invalid list of files to be imported"); log.error("Aborting..."); return; } initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return; } log.info("Initializing database import..."); config.getInternal().setImportFiles(files); EventDispatcher eventDispatcher = ObjectRegistry.getInstance().getEventDispatcher(); Importer importer = new Importer(cityGMLBuilder, schemaMapping, config, eventDispatcher); boolean success; try { success = importer.doProcess(); } catch (CityGMLImportException e) { log.error("Aborting due to an internal error: " + e.getMessage()); success = false; Throwable cause = e.getCause(); while (cause != null) { log.error(cause.getClass().getTypeName() + ": " + cause.getMessage()); cause = cause.getCause(); } } finally { try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } dbPool.disconnect(); } if 
(success) { log.info("Database import successfully finished."); } else { log.warn("Database import aborted."); System.exit(1); } } public void doValidate(String validateFiles) { // prepare list of files to be validated List<Path> files = getFiles(validateFiles); if (files.size() == 0) { log.error("Invalid list of files to be validated"); log.error("Aborting..."); return; } log.info("Initializing XML validation..."); config.getInternal().setImportFiles(files); EventDispatcher eventDispatcher = ObjectRegistry.getInstance().getEventDispatcher(); XMLValidator validator = new XMLValidator(config, eventDispatcher); boolean success = validator.doProcess(); try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } if (success) { log.info("XML validation finished."); } else { log.warn("XML validation aborted."); System.exit(1); } } public void doExport(String exportFile) { if (!setExportFile(exportFile)) return; initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return; } log.info("Initializing database export..."); EventDispatcher eventDispatcher = ObjectRegistry.getInstance().getEventDispatcher(); Exporter exporter = new Exporter(cityGMLBuilder, schemaMapping, config, eventDispatcher); boolean success = false; try { success = exporter.doProcess(); } catch (CityGMLExportException e) { log.error(e.getMessage()); Throwable cause = e.getCause(); while (cause != null) { log.error(cause.getClass().getTypeName() + ": " + cause.getMessage()); cause = cause.getCause(); } } finally { try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } dbPool.disconnect(); } if (success) { log.info("Database export successfully finished."); } else { log.warn("Database export aborted."); System.exit(1); } } public void doKmlExport(String kmlExportFile) { if (!setExportFile(kmlExportFile)) return; initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return; } log.info("Initializing database export..."); EventDispatcher 
eventDispatcher = ObjectRegistry.getInstance().getEventDispatcher(); KmlExporter kmlExporter = new KmlExporter(jaxbKmlContext, jaxbColladaContext, schemaMapping, config, eventDispatcher); boolean success = false; try { success = kmlExporter.doProcess(); } catch (KmlExportException e) { log.error(e.getMessage()); Throwable cause = e.getCause(); while (cause != null) { log.error(cause.getClass().getTypeName() + ": " + cause.getMessage()); cause = cause.getCause(); } } finally { try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } dbPool.disconnect(); } if (success) { log.info("Database export successfully finished."); } else { log.warn("Database export aborted."); System.exit(1); } } private boolean setExportFile(String kmlExportFile) { try { config.getInternal().setExportFile(new File(kmlExportFile).toPath()); } catch (InvalidPathException e) { log.error("'" + kmlExportFile + "' is not a valid file."); log.error("Aborting..."); return false; } return true; } public boolean doTestConnection() { initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return false; } dbPool.disconnect(); return true; } private void initDBPool() { // check active connection DBConnection conn = config.getProject().getDatabase().getActiveConnection(); if (conn == null) { log.error("No valid database connection found in project settings."); return; } log.info("Connecting to database profile '" + conn.getDescription() + "'."); conn.setInternalPassword(conn.getPassword()); try { dbPool.connect(config); log.info("Database connection established."); dbPool.getActiveDatabaseAdapter().getConnectionMetaData().printToConsole(); // log unsupported user-defined SRSs for (DatabaseSrs refSys : config.getProject().getDatabase().getReferenceSystems()) { if (!refSys.isSupported()) log.warn("Reference system '" + refSys.getDescription() + "' (SRID: " + refSys.getSrid() + ") is not supported."); } // print connection warnings List<DatabaseConnectionWarning> warnings = 
dbPool.getActiveDatabaseAdapter().getConnectionWarnings(); if (!warnings.isEmpty()) { for (DatabaseConnectionWarning warning : warnings) log.warn(warning.getMessage()); } } catch (DatabaseConfigurationException | SQLException e) { log.error("Connection to database could not be established: " + e.getMessage()); } catch (DatabaseVersionException e) { log.error(e.getMessage()); log.error("Supported versions are '" + Util.collection2string(e.getSupportedVersions(), ", ") + "'."); log.error("Connection to database could not be established."); } } private List<Path> getFiles(String fileNames) { List<Path> files = new ArrayList<>(); for (String part : fileNames.split(";")) { if (part == null || part.trim().isEmpty()) continue; File file = new File(part.trim()); if (file.isDirectory()) { files.add(file.toPath()); continue; } final String pathName = new File(file.getAbsolutePath()).getParent(); final String fileName = file.getName().replace("?", ".?").replace("*", ".*?"); file = new File(pathName); if (!file.exists()) { log.error("'" + file.toString() + "' does not exist"); continue; } File[] wildcardList = file.listFiles((dir, name) -> (name.matches(fileName))); if (wildcardList != null && wildcardList.length != 0) { for (File item : wildcardList) files.add(item.toPath()); } } return files; } }
impexp-client/src/main/java/org/citydb/cli/ImpExpCli.java
/* * 3D City Database - The Open Source CityGML Database * http://www.3dcitydb.org/ * * Copyright 2013 - 2019 * Chair of Geoinformatics * Technical University of Munich, Germany * https://www.gis.bgu.tum.de/ * * The 3D City Database is jointly developed with the following * cooperation partners: * * virtualcitySYSTEMS GmbH, Berlin <http://www.virtualcitysystems.de/> * M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.citydb.cli; import org.citydb.citygml.exporter.CityGMLExportException; import org.citydb.citygml.exporter.controller.Exporter; import org.citydb.citygml.importer.CityGMLImportException; import org.citydb.citygml.importer.controller.Importer; import org.citydb.citygml.importer.controller.XMLValidator; import org.citydb.config.Config; import org.citydb.config.project.database.DBConnection; import org.citydb.config.project.database.DatabaseConfigurationException; import org.citydb.config.project.database.DatabaseSrs; import org.citydb.database.connection.DatabaseConnectionPool; import org.citydb.database.connection.DatabaseConnectionWarning; import org.citydb.database.schema.mapping.SchemaMapping; import org.citydb.database.version.DatabaseVersionException; import org.citydb.event.EventDispatcher; import org.citydb.log.Logger; import org.citydb.modules.kml.controller.KmlExportException; import org.citydb.modules.kml.controller.KmlExporter; import org.citydb.registry.ObjectRegistry; 
import org.citydb.util.Util; import org.citygml4j.builder.jaxb.CityGMLBuilder; import javax.xml.bind.JAXBContext; import java.io.File; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; public class ImpExpCli { private final Logger log = Logger.getInstance(); private final DatabaseConnectionPool dbPool; private final SchemaMapping schemaMapping; private CityGMLBuilder cityGMLBuilder; private JAXBContext jaxbKmlContext; private JAXBContext jaxbColladaContext; private Config config; public ImpExpCli(JAXBContext jaxbKmlContext, JAXBContext jaxbColladaContext, Config config) { this.jaxbKmlContext = jaxbKmlContext; this.jaxbColladaContext = jaxbColladaContext; this.config = config; dbPool = DatabaseConnectionPool.getInstance(); cityGMLBuilder = ObjectRegistry.getInstance().getCityGMLBuilder(); schemaMapping = ObjectRegistry.getInstance().getSchemaMapping(); } public void doImport(String importFiles) { // prepare list of files to be validated List<Path> files = getFiles(importFiles); if (files.size() == 0) { log.error("Invalid list of files to be imported"); log.error("Aborting..."); return; } initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return; } log.info("Initializing database import..."); config.getInternal().setImportFiles(files); EventDispatcher eventDispatcher = ObjectRegistry.getInstance().getEventDispatcher(); Importer importer = new Importer(cityGMLBuilder, schemaMapping, config, eventDispatcher); boolean success; try { success = importer.doProcess(); } catch (CityGMLImportException e) { log.error("Aborting due to an internal error: " + e.getMessage()); success = false; Throwable cause = e.getCause(); while (cause != null) { log.error(cause.getClass().getTypeName() + ": " + cause.getMessage()); cause = cause.getCause(); } } finally { try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } dbPool.disconnect(); } if 
(success) { log.info("Database import successfully finished."); } else { log.warn("Database import aborted."); } } public void doValidate(String validateFiles) { // prepare list of files to be validated List<Path> files = getFiles(validateFiles); if (files.size() == 0) { log.error("Invalid list of files to be validated"); log.error("Aborting..."); return; } log.info("Initializing XML validation..."); config.getInternal().setImportFiles(files); EventDispatcher eventDispatcher = ObjectRegistry.getInstance().getEventDispatcher(); XMLValidator validator = new XMLValidator(config, eventDispatcher); boolean success = validator.doProcess(); try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } if (success) { log.info("XML validation finished."); } else { log.warn("XML validation aborted."); } } public void doExport(String exportFile) { if (!setExportFile(exportFile)) return; initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return; } log.info("Initializing database export..."); EventDispatcher eventDispatcher = ObjectRegistry.getInstance().getEventDispatcher(); Exporter exporter = new Exporter(cityGMLBuilder, schemaMapping, config, eventDispatcher); boolean success = false; try { success = exporter.doProcess(); } catch (CityGMLExportException e) { log.error(e.getMessage()); Throwable cause = e.getCause(); while (cause != null) { log.error(cause.getClass().getTypeName() + ": " + cause.getMessage()); cause = cause.getCause(); } } finally { try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } dbPool.disconnect(); } if (success) { log.info("Database export successfully finished."); } else { log.warn("Database export aborted."); } } public void doKmlExport(String kmlExportFile) { if (!setExportFile(kmlExportFile)) return; initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return; } log.info("Initializing database export..."); EventDispatcher eventDispatcher = 
ObjectRegistry.getInstance().getEventDispatcher(); KmlExporter kmlExporter = new KmlExporter(jaxbKmlContext, jaxbColladaContext, schemaMapping, config, eventDispatcher); boolean success = false; try { success = kmlExporter.doProcess(); } catch (KmlExportException e) { log.error(e.getMessage()); Throwable cause = e.getCause(); while (cause != null) { log.error(cause.getClass().getTypeName() + ": " + cause.getMessage()); cause = cause.getCause(); } } finally { try { eventDispatcher.flushEvents(); } catch (InterruptedException e) { // } dbPool.disconnect(); } if (success) { log.info("Database export successfully finished."); } else { log.warn("Database export aborted."); } } private boolean setExportFile(String kmlExportFile) { try { config.getInternal().setExportFile(new File(kmlExportFile).toPath()); } catch (InvalidPathException e) { log.error("'" + kmlExportFile + "' is not a valid file."); log.error("Aborting..."); return false; } return true; } public boolean doTestConnection() { initDBPool(); if (!dbPool.isConnected()) { log.error("Aborting..."); return false; } dbPool.disconnect(); return true; } private void initDBPool() { // check active connection DBConnection conn = config.getProject().getDatabase().getActiveConnection(); if (conn == null) { log.error("No valid database connection found in project settings."); return; } log.info("Connecting to database profile '" + conn.getDescription() + "'."); conn.setInternalPassword(conn.getPassword()); try { dbPool.connect(config); log.info("Database connection established."); dbPool.getActiveDatabaseAdapter().getConnectionMetaData().printToConsole(); // log unsupported user-defined SRSs for (DatabaseSrs refSys : config.getProject().getDatabase().getReferenceSystems()) { if (!refSys.isSupported()) log.warn("Reference system '" + refSys.getDescription() + "' (SRID: " + refSys.getSrid() + ") is not supported."); } // print connection warnings List<DatabaseConnectionWarning> warnings = 
dbPool.getActiveDatabaseAdapter().getConnectionWarnings(); if (!warnings.isEmpty()) { for (DatabaseConnectionWarning warning : warnings) log.warn(warning.getMessage()); } } catch (DatabaseConfigurationException | SQLException e) { log.error("Connection to database could not be established: " + e.getMessage()); } catch (DatabaseVersionException e) { log.error(e.getMessage()); log.error("Supported versions are '" + Util.collection2string(e.getSupportedVersions(), ", ") + "'."); log.error("Connection to database could not be established."); } } private List<Path> getFiles(String fileNames) { List<Path> files = new ArrayList<>(); for (String part : fileNames.split(";")) { if (part == null || part.trim().isEmpty()) continue; File file = new File(part.trim()); if (file.isDirectory()) { files.add(file.toPath()); continue; } final String pathName = new File(file.getAbsolutePath()).getParent(); final String fileName = file.getName().replace("?", ".?").replace("*", ".*?"); file = new File(pathName); if (!file.exists()) { log.error("'" + file.toString() + "' does not exist"); continue; } File[] wildcardList = file.listFiles((dir, name) -> (name.matches(fileName))); if (wildcardList != null && wildcardList.length != 0) { for (File item : wildcardList) files.add(item.toPath()); } } return files; } }
set exit code for CLI
impexp-client/src/main/java/org/citydb/cli/ImpExpCli.java
set exit code for CLI
Java
apache-2.0
288670b774c2fb58bc7a98ea25e0fd54e228d360
0
alien11689/aries,rotty3000/aries,apache/aries,rotty3000/aries,graben/aries,apache/aries,apache/aries,graben/aries,alien11689/aries,rotty3000/aries,apache/aries,alien11689/aries,alien11689/aries,rotty3000/aries,graben/aries,graben/aries
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.aries.application.resolver.obr; import static org.apache.aries.application.utils.AppConstants.LOG_ENTRY; import static org.apache.aries.application.utils.AppConstants.LOG_EXIT; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.aries.application.ApplicationMetadata; import org.apache.aries.application.Content; import org.apache.aries.application.InvalidAttributeException; import org.apache.aries.application.VersionRange; import org.apache.aries.application.management.AriesApplication; import org.apache.aries.application.management.BundleInfo; import org.apache.aries.application.management.ResolveConstraint; import org.apache.aries.application.management.ResolverException; import 
org.apache.aries.application.management.spi.repository.PlatformRepository; import org.apache.aries.application.management.spi.resolve.AriesApplicationResolver; import org.apache.aries.application.modelling.ImportedBundle; import org.apache.aries.application.modelling.ModelledResource; import org.apache.aries.application.modelling.ModellingConstants; import org.apache.aries.application.modelling.ModellingManager; import org.apache.aries.application.modelling.utils.ModellingHelper; import org.apache.aries.application.resolver.internal.MessageUtil; import org.apache.aries.application.resolver.obr.impl.ApplicationResourceImpl; import org.apache.aries.application.resolver.obr.impl.ModelledBundleResource; import org.apache.aries.application.resolver.obr.impl.OBRBundleInfo; import org.apache.aries.application.resolver.obr.impl.RepositoryGeneratorImpl; import org.apache.aries.application.resolver.obr.impl.ResourceWrapper; import org.apache.aries.application.utils.AppConstants; import org.apache.aries.application.utils.filesystem.IOUtils; import org.apache.aries.application.utils.manifest.ManifestHeaderProcessor; import org.apache.aries.application.utils.manifest.ManifestHeaderProcessor.NameValueMap; import org.apache.felix.bundlerepository.Capability; import org.apache.felix.bundlerepository.DataModelHelper; import org.apache.felix.bundlerepository.Reason; import org.apache.felix.bundlerepository.Repository; import org.apache.felix.bundlerepository.RepositoryAdmin; import org.apache.felix.bundlerepository.Requirement; import org.apache.felix.bundlerepository.Resolver; import org.apache.felix.bundlerepository.Resource; import org.osgi.framework.Constants; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.Version; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @version $Rev$ $Date$ */ public class OBRAriesResolver implements AriesApplicationResolver { private static Logger log = LoggerFactory.getLogger(OBRAriesResolver.class); 
private final RepositoryAdmin repositoryAdmin; private boolean returnOptionalResources = true; private PlatformRepository platformRepository; private ModellingManager modellingManager; private ModellingHelper modellingHelper; public void setModellingManager (ModellingManager m) { modellingManager = m; } public void setModellingHelper (ModellingHelper mh) { modellingHelper = mh; } public PlatformRepository getPlatformRepository() { return platformRepository; } public RepositoryAdmin getRepositoryAdmin() { return this.repositoryAdmin; } public void setPlatformRepository(PlatformRepository platformRepository) { this.platformRepository = platformRepository; } public OBRAriesResolver(RepositoryAdmin repositoryAdmin) { this.repositoryAdmin = repositoryAdmin; } public void setReturnOptionalResources(boolean optional) { this.returnOptionalResources = optional; } public boolean getReturnOptionalResources() { return returnOptionalResources; } public Collection<ModelledResource> resolve(String appName, String appVersion, Collection<ModelledResource> byValueBundles, Collection<Content> inputs) throws ResolverException { return resolve(appName, appVersion, byValueBundles, inputs, this.platformRepository); } /** * Resolve a list of resources from the OBR bundle repositories by OBR * resolver. 
* * @param appName - application name * @param appVersion - application version * @param byValueBundles - by value bundles * @param inputs - other constraints * @param platformRepository - a platform repository to use instead of the one provided as a service * @return a collection of modelled resources required by this application * @throws ResolverException */ @Override public Collection<ModelledResource> resolve(String appName, String appVersion, Collection<ModelledResource> byValueBundles, Collection<Content> inputs, PlatformRepository platformRepository) throws ResolverException { log.debug(LOG_ENTRY, "resolve", new Object[]{appName, appVersion,byValueBundles, inputs}); Collection<ImportedBundle> importedBundles = toImportedBundle(inputs); Collection<ModelledResource> toReturn = new ArrayList<ModelledResource>(); Resolver obrResolver = getConfiguredObrResolver(appName, appVersion, byValueBundles, platformRepository); // add a resource describing the requirements of the application metadata. obrResolver.add(createApplicationResource( appName, appVersion, importedBundles)); if (obrResolver.resolve()) { List<Resource> requiredResources = retrieveRequiredResources(obrResolver); if (requiredResources == null) { log.debug("resolver.getRequiredResources() returned null"); } else { for (Resource r : requiredResources) { NameValueMap<String, String> attribs = new NameValueMap<String, String>(); attribs.put(Constants.VERSION_ATTRIBUTE, "[" + r.getVersion() + ',' + r.getVersion() + "]"); ModelledResource modelledResourceForThisMatch = null; // OBR may give us back the global capabilities. Typically these do not have a bundle symbolic name - they're a // list of packages available in the target runtime environment. 
If the resource has no symbolic name, we can ignore it if (r.getSymbolicName() != null) { try { modelledResourceForThisMatch = new ModelledBundleResource (r, modellingManager, modellingHelper); } catch (InvalidAttributeException iax) { ResolverException re = new ResolverException("Internal error occurred: " + iax.toString()); log.debug(LOG_EXIT, "resolve", re); throw re; } toReturn.add(modelledResourceForThisMatch); } } } log.debug(LOG_EXIT, "resolve", toReturn); return toReturn; } else { Reason[] reasons = obrResolver.getUnsatisfiedRequirements(); // let's refine the list by removing the indirect unsatisfied bundles that are caused by unsatisfied packages or other bundles Map<String,Set<String>> refinedReqs = refineUnsatisfiedRequirements(obrResolver, reasons); StringBuffer reqList = new StringBuffer(); Map<String, String> unsatisfiedRequirements = extractConsumableMessageInfo(refinedReqs); for (String reason : unsatisfiedRequirements.keySet()) { reqList.append('\n'); reqList.append(reason); } ResolverException re = new ResolverException(MessageUtil.getMessage("RESOLVER_UNABLE_TO_RESOLVE", new Object[] { appName, reqList })); re.setUnsatisfiedRequirementsAndReasons(unsatisfiedRequirements); log.debug(LOG_EXIT, "resolve", re); throw re; } } private Resolver getConfiguredObrResolver(String appName, String appVersion, Collection<ModelledResource> byValueBundles) throws ResolverException { return getConfiguredObrResolver(appName, appVersion, byValueBundles, platformRepository); } private Resolver getConfiguredObrResolver(String appName, String appVersion, Collection<ModelledResource> byValueBundles, PlatformRepository platformRepository) throws ResolverException { log.debug(LOG_ENTRY, "getConfiguredObrResolver", new Object[]{appName, appVersion,byValueBundles }); DataModelHelper helper = repositoryAdmin.getHelper(); Repository appRepo; try { ByteArrayOutputStream bytesOut = new ByteArrayOutputStream(); RepositoryGeneratorImpl.generateRepository(repositoryAdmin, 
appName + "_" + appVersion, byValueBundles, bytesOut); appRepo = helper.readRepository(new InputStreamReader(new ByteArrayInputStream(bytesOut.toByteArray()))); } catch (Exception e) { throw new ResolverException(e); } List<Repository> resolveRepos = new ArrayList<Repository>(); // add system repository resolveRepos.add(repositoryAdmin.getSystemRepository()); // add local repository if configured if (!(excludeLocalRuntime())) { resolveRepos.add(getLocalRepository(repositoryAdmin)); } // add application repository resolveRepos.add(appRepo); // Need to refresh the repositories added to repository admin // add user-defined repositories Repository[] repos = repositoryAdmin.listRepositories(); for (Repository r : repos) { resolveRepos.add(r); } Resolver obrResolver = repositoryAdmin.resolver(resolveRepos.toArray(new Repository[resolveRepos.size()])); addPlatformRepositories (obrResolver, appName, platformRepository); log.debug(LOG_EXIT, "getConfiguredObrResolver", obrResolver); return obrResolver; } @Deprecated @Override public Set<BundleInfo> resolve(AriesApplication app, ResolveConstraint... constraints) throws ResolverException { log.trace("resolving {}", app); ApplicationMetadata appMeta = app.getApplicationMetadata(); String appName = appMeta.getApplicationSymbolicName(); Version appVersion = appMeta.getApplicationVersion(); List<Content> appContent = appMeta.getApplicationContents(); Collection<Content> useBundleContent = appMeta.getUseBundles(); List<Content> contents = new ArrayList<Content>(); contents.addAll(appContent); contents.addAll(useBundleContent); if ((constraints != null ) && (constraints.length > 0 )) { for (ResolveConstraint con: constraints) { contents.add(ManifestHeaderProcessor.parseContent(con.getBundleName(), con.getVersionRange().toString())); } } Resolver obrResolver = getConfiguredObrResolver(appName, appVersion.toString(), toModelledResource(app.getBundleInfo())); // add a resource describing the requirements of the application metadata. 
obrResolver.add(createApplicationResource( appName, appVersion, contents)); if (obrResolver.resolve()) { Set<BundleInfo> result = new HashSet<BundleInfo>(); List<Resource> requiredResources = retrieveRequiredResources(obrResolver); for (Resource resource: requiredResources) { BundleInfo bundleInfo = toBundleInfo(resource, false); result.add(bundleInfo); } if (returnOptionalResources) { for (Resource resource: obrResolver.getOptionalResources()) { BundleInfo bundleInfo = toBundleInfo(resource, true); result.add(bundleInfo); } } return result; } else { Reason[] reasons = obrResolver.getUnsatisfiedRequirements(); //refine the list by removing the indirect unsatisfied bundles that are caused by unsatisfied packages or other bundles Map<String,Set<String>> refinedReqs = refineUnsatisfiedRequirements(obrResolver, reasons); StringBuffer reqList = new StringBuffer(); Map<String, String> unsatisfiedRequirements = extractConsumableMessageInfo(refinedReqs); for (String reason : unsatisfiedRequirements.keySet()) { reqList.append('\n'); reqList.append(reason); } ResolverException re = new ResolverException(MessageUtil.getMessage("RESOLVER_UNABLE_TO_RESOLVE", new Object[] { app.getApplicationMetadata().getApplicationName(), reqList })); re.setUnsatisfiedRequirementsAndReasons(unsatisfiedRequirements); log.debug(LOG_EXIT, "resolve", re); throw re; } } @Override public BundleInfo getBundleInfo(String bundleSymbolicName, Version bundleVersion) { Map<String, String> attribs = new HashMap<String, String>(); attribs.put(Resource.VERSION, bundleVersion.toString()); String filterString = ManifestHeaderProcessor.generateFilter(Resource.SYMBOLIC_NAME, bundleSymbolicName, attribs); Resource[] resources; try { resources = repositoryAdmin.discoverResources(filterString); if (resources != null && resources.length > 0) { return toBundleInfo(resources[0], false); } else { return null; } } catch (InvalidSyntaxException e) { log.error("Invalid filter", e); return null; } } /* A 'platform 
repository' describes capabilities of the target runtime environment
   * These should be added to the resolver without being listed as coming from a particular
   * repository or bundle.
   */
  private void addPlatformRepositories(Resolver obrResolver, String appName,
      PlatformRepository platformRepository) {
    log.debug(LOG_ENTRY, "addPlatformRepositories", new Object[]{obrResolver, appName});
    DataModelHelper helper = repositoryAdmin.getHelper();
    if (platformRepository != null) {
      Collection<URI> uris = platformRepository.getPlatformRepositoryURLs();
      if ((uris != null) && (!uris.isEmpty())) {
        for (URI uri : uris) {
          InputStream is = null;
          try {
            is = uri.toURL().openStream();
            Reader repoReader = new InputStreamReader(is);
            Repository aPlatformRepo = helper.readRepository(repoReader);
            Resource[] resources = aPlatformRepo.getResources();
            for (Resource r : resources) {
              Capability[] caps = r.getCapabilities();
              for (Capability c : caps) {
                obrResolver.addGlobalCapability(c);
              }
            }
          } catch (Exception e) {
            // Not fatal: an unreadable platform repository is logged and skipped.
            log.error(MessageUtil.getMessage("RESOLVER_UNABLE_TO_READ_REPOSITORY_EXCEPTION",
                new Object[]{appName, uri}));
          } finally {
            IOUtils.close(is);
          }
        }
      }
    }
    log.debug(LOG_EXIT, "addPlatformRepositories");
  }

  /** Builds the OBR resource representing the application's content list. */
  private Resource createApplicationResource(String appName, Version appVersion,
      List<Content> appContent) {
    return new ApplicationResourceImpl(appName, appVersion, appContent);
  }

  /** Builds the OBR resource representing the application's imported bundles. */
  private Resource createApplicationResource(String appName, String appVersion,
      Collection<ImportedBundle> inputs) {
    return new ApplicationResourceImpl(appName, Version.parseVersion(appVersion), inputs);
  }

  /**
   * Converts an OBR resource into a BundleInfo. When {@code optional} is true the
   * returned info carries the {@code resolution:=optional} directive.
   */
  private BundleInfo toBundleInfo(Resource resource, boolean optional) {
    Map<String, String> directives = null;
    if (optional) {
      directives = new HashMap<String, String>();
      directives.put(Constants.RESOLUTION_DIRECTIVE, Constants.RESOLUTION_OPTIONAL);
    }
    return new OBRBundleInfo(resource.getSymbolicName(), resource.getVersion(),
        resource.getURI(), null, null, null, null, null, null, directives, null);
  }

  /**
   * Get the list of resources returned by the resolver, de-duplicated per
   * symbolic name via {@link #mergeResource}.
   *
   * @param resolver OBR resolver
   * @return a list of required resources
   */
  protected List<Resource> retrieveRequiredResources(Resolver resolver) {
    log.debug(LOG_ENTRY, "retrieveRequiredResources", resolver);
    Map<String, List<Resource>> resourcesByName = new HashMap<String, List<Resource>>();
    for (Resource r : resolver.getRequiredResources()) {
      resourcesByName.put(r.getSymbolicName(),
          mergeResource(resolver, r, resourcesByName.get(r.getSymbolicName())));
    }
    List<Resource> result = new ArrayList<Resource>();
    for (List<Resource> res : resourcesByName.values()) {
      result.addAll(res);
    }
    log.debug(LOG_EXIT, "retrieveRequiredResources", result);
    return result;
  }

  /**
   * Get rid of the redundant resources: a resource is dropped when another one
   * with the same symbolic name satisfies all the reasons it was pulled in for
   * (ties are broken by keeping the higher version).
   *
   * @param resolver OBR resolver
   * @param r a resource
   * @param list similar resources (may be null)
   * @return the list of minimum resources
   */
  protected List<Resource> mergeResource(Resolver resolver, Resource r, List<Resource> list) {
    log.debug(LOG_ENTRY, "mergeResource", new Object[]{resolver, r, list});
    if (list == null) {
      log.debug(LOG_EXIT, "mergeResource", Arrays.asList(r));
      return Arrays.asList(r);
    } else {
      List<Resource> result = new ArrayList<Resource>();
      for (Resource old : list) {
        boolean oldRedundant = satisfiesAll(r, resolver.getReason(old));
        boolean newRedundant = satisfiesAll(old, resolver.getReason(r));
        if (oldRedundant && newRedundant) {
          // Both cover each other's reasons: keep the higher version only.
          int comp = old.getVersion().compareTo(r.getVersion());
          oldRedundant = comp < 0;
          newRedundant = comp >= 0;
        }
        if (newRedundant) {
          // The new resource adds nothing over 'old': keep the list unchanged.
          log.debug(LOG_EXIT, "mergeResource", list);
          return list;
        } else if (oldRedundant) {
          // 'old' is covered by the new resource, so it is not carried over.
        } else {
          result.add(old);
        }
      }
      result.add(r);
      log.debug(LOG_EXIT, "mergeResource", result);
      return result;
    }
  }

  /**
   * @return true if every non-optional requirement in {@code reasons} is
   *         satisfied by some capability of {@code res}.
   */
  protected boolean satisfiesAll(Resource res, Reason[] reasons) {
    log.debug(LOG_ENTRY, "satisfiesAll", new Object[] {res, Arrays.toString(reasons)});
    // Convert the reasons to requirements.
    List<Requirement> reqs = new ArrayList<Requirement>();
    for (Reason reason : reasons) {
      reqs.add(reason.getRequirement());
    }
    boolean result = true;
    outer: for (Requirement r : reqs) {
      boolean found = false;
      inner: for (Capability c : res.getCapabilities()) {
        if (r.isSatisfied(c)) {
          found = true;
          break inner;
        }
      }
      if (!found && !r.isOptional()) {
        result = false;
        break outer;
      }
    }
    log.debug(LOG_EXIT, "satisfiesAll", result);
    return result;
  }

  // Filter attributes that carry dedicated message keys; everything else is a "custom" attribute.
  private static final Set<String> SPECIAL_FILTER_ATTRS = Collections
      .unmodifiableSet(new HashSet<String>(Arrays.asList(ModellingConstants.OBR_PACKAGE,
          ModellingConstants.OBR_SYMBOLIC_NAME, ModellingConstants.OBR_SERVICE,
          Constants.VERSION_ATTRIBUTE)));

  /**
   * Turn each unsatisfied requirement into a human-readable message.
   *
   * @param refinedReqs map of failing filter string to the set of bundles requiring it
   *                    (values may be null)
   * @return map of human-readable message to the requirement type
   *         (package / bundle / service / unknown)
   */
  private Map<String, String> extractConsumableMessageInfo(
      Map<String, Set<String>> refinedReqs) {
    log.debug(LOG_ENTRY, "extractConsumableMessageInfo", refinedReqs);
    Map<String, String> unsatisfiedRequirements = new HashMap<String, String>();
    for (Map.Entry<String, Set<String>> filterEntry : refinedReqs.entrySet()) {
      String filter = filterEntry.getKey();
      Set<String> bundlesFailing = filterEntry.getValue();
      log.debug("unable to satisfy the filter , filter = " + filter + "required by "
          + Arrays.toString(bundlesFailing.toArray()));
      Map<String, String> attrs = ManifestHeaderProcessor.parseFilter(filter);
      Map<String, String> customAttrs = new HashMap<String, String>();
      for (Map.Entry<String, String> e : attrs.entrySet()) {
        if (!SPECIAL_FILTER_ATTRS.contains(e.getKey())) {
          customAttrs.put(e.getKey(), e.getValue());
        }
      }
      // The message key is assembled from suffixes describing what is known
      // about the failing requirement; the inserts line up with each suffix.
      StringBuilder msgKey = new StringBuilder();
      List<Object> inserts = new ArrayList<Object>();
      final String type;
      boolean unknownType = false;
      if (attrs.containsKey(ModellingConstants.OBR_PACKAGE)) {
        type = ModellingConstants.OBR_PACKAGE;
        msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_PACKAGE");
        inserts.add(attrs.get(ModellingConstants.OBR_PACKAGE));
      } else if (attrs.containsKey(ModellingConstants.OBR_SYMBOLIC_NAME)) {
        type = ModellingConstants.OBR_SYMBOLIC_NAME;
        msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_BUNDLE");
        inserts.add(attrs.get(ModellingConstants.OBR_SYMBOLIC_NAME));
      } else if (attrs.containsKey(ModellingConstants.OBR_SERVICE)) {
        type = ModellingConstants.OBR_SERVICE;
        msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_SERVICE");
        // No insert for service name as the name must be "*" to match any
        // Service capability
      } else {
        type = ModellingConstants.OBR_UNKNOWN;
        unknownType = true;
        msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_FILTER");
        inserts.add(filter);
      }
      if (bundlesFailing != null && !bundlesFailing.isEmpty()) {
        msgKey.append("_REQUIRED_BY_BUNDLE");
        if (bundlesFailing.size() == 1) {
          // Just take the string if there's only one of them.
          inserts.add(bundlesFailing.iterator().next());
        } else {
          // Add the whole set if there isn't exactly one.
          inserts.add(bundlesFailing.toString());
        }
      }
      if (!unknownType && !customAttrs.isEmpty()) {
        msgKey.append("_WITH_ATTRS");
        inserts.add(customAttrs);
      }
      if (!unknownType && attrs.containsKey(Constants.VERSION_ATTRIBUTE)) {
        msgKey.append("_WITH_VERSION");
        VersionRange vr = ManifestHeaderProcessor.parseVersionRange(attrs
            .get(Constants.VERSION_ATTRIBUTE));
        inserts.add(vr.getMinimumVersion());
        if (!vr.isExactVersion()) {
          msgKey.append(vr.isMinimumExclusive() ? "_LOWEX" : "_LOW");
          if (vr.getMaximumVersion() != null) {
            msgKey.append(vr.isMaximumExclusive() ? "_UPEX" : "_UP");
            inserts.add(vr.getMaximumVersion());
          }
        }
      }
      String msgKeyStr = msgKey.toString();
      String msg = MessageUtil.getMessage(msgKeyStr, inserts.toArray());
      unsatisfiedRequirements.put(msg, type);
    }
    log.debug(LOG_EXIT, "extractConsumableMessageInfo", unsatisfiedRequirements);
    return unsatisfiedRequirements;
  }

  /**
   * Refine the unsatisfied requirements ready for later human consumption.
   *
   * @param resolver The resolver to be used to refine the requirements
   * @param reasons The reasons
   * @return A map of the unsatisfied requirement (filter string) to the set of
   *         bundles that have that requirement unsatisfied (values may be null)
   */
  private Map<String, Set<String>> refineUnsatisfiedRequirements(Resolver resolver,
      Reason[] reasons) {
    log.debug(LOG_ENTRY, "refineUnsatisfiedRequirements",
        new Object[]{resolver, Arrays.toString(reasons)});
    Map<Requirement, Set<String>> req_resources = new HashMap<Requirement, Set<String>>();
    // Key each requirement by the resources (symbolicName_version) that need it.
    Set<Resource> resources = new HashSet<Resource>();
    for (Reason reason : reasons) {
      resources.add(reason.getResource());
      Requirement key = reason.getRequirement();
      String value = reason.getResource().getSymbolicName() + "_"
          + reason.getResource().getVersion().toString();
      Set<String> values = req_resources.get(key);
      if (values == null) {
        values = new HashSet<String>();
      }
      values.add(value);
      req_resources.put(key, values);
    }
    // Remove the requirements that can be satisfied by the failing resources
    // themselves, leaving only the root causes. For instance, given unsatisfied
    // reasons [package a, required by bundle aa], [package b, required by bundle bb]
    // and [package c, required by bundle cc], if bundle aa exports packages a and c
    // we only want to report that package a is needed by bundle aa.
    Set<Capability> caps = new HashSet<Capability>();
    for (Resource res : resources) {
      if ((res != null) && (res.getCapabilities() != null)) {
        // Arrays.asList never returns null, so no further null check is needed.
        caps.addAll(Arrays.asList(res.getCapabilities()));
      }
    }
    Iterator<Map.Entry<Requirement, Set<String>>> iterator = req_resources.entrySet().iterator();
    while (iterator.hasNext()) {
      Map.Entry<Requirement, Set<String>> entry = iterator.next();
      Requirement req = entry.getKey();
      for (Capability cap : caps) {
        if (req.isSatisfied(cap)) {
          // A failing resource provides this capability: not a root cause.
          iterator.remove();
          break;
        }
      }
    }
    // Now the map only contains the necessary missing requirements.
    Map<String, Set<String>> result = new HashMap<String, Set<String>>();
    for (Map.Entry<Requirement, Set<String>> req_res : req_resources.entrySet()) {
      result.put(req_res.getKey().getFilter(), req_res.getValue());
    }
    log.debug(LOG_EXIT, "refineUnsatisfiedRequirements", new Object[]{result});
    return result;
  }

  /**
   * Converts application Content entries into ImportedBundle constraints.
   *
   * @throws ResolverException if a content entry carries invalid attributes
   */
  private Collection<ImportedBundle> toImportedBundle(Collection<Content> content)
      throws ResolverException {
    log.debug(LOG_ENTRY, "toImportedBundle", content);
    List<ImportedBundle> result = new ArrayList<ImportedBundle>();
    for (Content c : content) {
      try {
        result.add(modellingManager.getImportedBundle(c.getContentName(),
            c.getVersion().toString()));
      } catch (InvalidAttributeException iae) {
        throw new ResolverException(iae);
      }
    }
    log.debug(LOG_EXIT, "toImportedBundle", result);
    return result;
  }

  /**
   * Converts BundleInfo entries into ModelledResources; a null or empty input
   * yields an empty collection.
   *
   * @throws ResolverException if a bundle carries invalid attributes
   */
  private Collection<ModelledResource> toModelledResource(Collection<BundleInfo> bundleInfos)
      throws ResolverException {
    Collection<ModelledResource> result = new ArrayList<ModelledResource>();
    if ((bundleInfos != null) && (!bundleInfos.isEmpty())) {
      for (BundleInfo bi : bundleInfos) {
        try {
          result.add(modellingManager.getModelledResource(null, bi, null, null));
        } catch (InvalidAttributeException iae) {
          throw new ResolverException(iae);
        }
      }
    }
    return result;
  }

  /**
   * Returns a view of the given admin's local repository with every resource
   * wrapped in a ResourceWrapper (wrapper semantics are defined elsewhere —
   * NOTE(review): presumably it adjusts how the resource is exposed; confirm).
   */
  private Repository getLocalRepository(RepositoryAdmin admin) {
    // Fix: use the supplied admin instead of silently ignoring the parameter
    // and re-reading the repositoryAdmin field. The sole caller passes that
    // same field, so behavior is unchanged, but the signature is now honest.
    Repository localRepository = admin.getLocalRepository();
    Resource[] resources = localRepository.getResources();
    Resource[] newResources = new Resource[resources.length];
    for (int i = 0; i < resources.length; i++) {
      newResources[i] = new ResourceWrapper(resources[i]);
    }
    return admin.getHelper().repository(newResources);
  }

  /**
   * @return true when the system property requests that the local runtime
   *         repository be excluded from resolution.
   */
  private boolean excludeLocalRuntime() {
    return Boolean.parseBoolean(
        System.getProperty(AppConstants.PROVISON_EXCLUDE_LOCAL_REPO_SYSPROP));
  }
}
application/application-obr-resolver/src/main/java/org/apache/aries/application/resolver/obr/OBRAriesResolver.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.aries.application.resolver.obr; import static org.apache.aries.application.utils.AppConstants.LOG_ENTRY; import static org.apache.aries.application.utils.AppConstants.LOG_EXIT; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.aries.application.ApplicationMetadata; import org.apache.aries.application.Content; import org.apache.aries.application.InvalidAttributeException; import org.apache.aries.application.VersionRange; import org.apache.aries.application.management.AriesApplication; import org.apache.aries.application.management.BundleInfo; import org.apache.aries.application.management.ResolveConstraint; import org.apache.aries.application.management.ResolverException; import 
org.apache.aries.application.management.spi.repository.PlatformRepository; import org.apache.aries.application.management.spi.resolve.AriesApplicationResolver; import org.apache.aries.application.modelling.ImportedBundle; import org.apache.aries.application.modelling.ModelledResource; import org.apache.aries.application.modelling.ModellingConstants; import org.apache.aries.application.modelling.ModellingManager; import org.apache.aries.application.modelling.utils.ModellingHelper; import org.apache.aries.application.resolver.internal.MessageUtil; import org.apache.aries.application.resolver.obr.impl.ApplicationResourceImpl; import org.apache.aries.application.resolver.obr.impl.ModelledBundleResource; import org.apache.aries.application.resolver.obr.impl.OBRBundleInfo; import org.apache.aries.application.resolver.obr.impl.RepositoryGeneratorImpl; import org.apache.aries.application.resolver.obr.impl.ResourceWrapper; import org.apache.aries.application.utils.AppConstants; import org.apache.aries.application.utils.filesystem.IOUtils; import org.apache.aries.application.utils.manifest.ManifestHeaderProcessor; import org.apache.aries.application.utils.manifest.ManifestHeaderProcessor.NameValueMap; import org.apache.felix.bundlerepository.Capability; import org.apache.felix.bundlerepository.DataModelHelper; import org.apache.felix.bundlerepository.Reason; import org.apache.felix.bundlerepository.Repository; import org.apache.felix.bundlerepository.RepositoryAdmin; import org.apache.felix.bundlerepository.Requirement; import org.apache.felix.bundlerepository.Resolver; import org.apache.felix.bundlerepository.Resource; import org.osgi.framework.Constants; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.Version; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @version $Rev$ $Date$ */ public class OBRAriesResolver implements AriesApplicationResolver { private static Logger log = LoggerFactory.getLogger(OBRAriesResolver.class); 
private final RepositoryAdmin repositoryAdmin; private boolean returnOptionalResources = true; private PlatformRepository platformRepository; private ModellingManager modellingManager; private ModellingHelper modellingHelper; public void setModellingManager (ModellingManager m) { modellingManager = m; } public void setModellingHelper (ModellingHelper mh) { modellingHelper = mh; } public PlatformRepository getPlatformRepository() { return platformRepository; } public RepositoryAdmin getRepositoryAdmin() { return this.repositoryAdmin; } public void setPlatformRepository(PlatformRepository platformRepository) { this.platformRepository = platformRepository; } public OBRAriesResolver(RepositoryAdmin repositoryAdmin) { this.repositoryAdmin = repositoryAdmin; } public void setReturnOptionalResources(boolean optional) { this.returnOptionalResources = optional; } public boolean getReturnOptionalResources() { return returnOptionalResources; } public Collection<ModelledResource> resolve(String appName, String appVersion, Collection<ModelledResource> byValueBundles, Collection<Content> inputs) throws ResolverException { return resolve(appName, appVersion, byValueBundles, inputs, this.platformRepository); } /** * Resolve a list of resources from the OBR bundle repositories by OBR * resolver. 
* * @param appName - application name * @param appVersion - application version * @param byValueBundles - by value bundles * @param inputs - other constraints * @param platformRepository - a platform repository to use instead of the one provided as a service * @return a collection of modelled resources required by this application * @throws ResolverException */ @Override public Collection<ModelledResource> resolve(String appName, String appVersion, Collection<ModelledResource> byValueBundles, Collection<Content> inputs, PlatformRepository platformRepository) throws ResolverException { log.debug(LOG_ENTRY, "resolve", new Object[]{appName, appVersion,byValueBundles, inputs}); Collection<ImportedBundle> importedBundles = toImportedBundle(inputs); Collection<ModelledResource> toReturn = new ArrayList<ModelledResource>(); Resolver obrResolver = getConfiguredObrResolver(appName, appVersion, byValueBundles, platformRepository); // add a resource describing the requirements of the application metadata. obrResolver.add(createApplicationResource( appName, appVersion, importedBundles)); if (obrResolver.resolve()) { List<Resource> requiredResources = retrieveRequiredResources(obrResolver); if (requiredResources == null) { log.debug("resolver.getRequiredResources() returned null"); } else { for (Resource r : requiredResources) { NameValueMap<String, String> attribs = new NameValueMap<String, String>(); attribs.put(Constants.VERSION_ATTRIBUTE, "[" + r.getVersion() + ',' + r.getVersion() + "]"); ModelledResource modelledResourceForThisMatch = null; // OBR may give us back the global capabilities. Typically these do not have a bundle symbolic name - they're a // list of packages available in the target runtime environment. 
If the resource has no symbolic name, we can ignore it if (r.getSymbolicName() != null) { try { modelledResourceForThisMatch = new ModelledBundleResource (r, modellingManager, modellingHelper); } catch (InvalidAttributeException iax) { ResolverException re = new ResolverException("Internal error occurred: " + iax.toString()); log.debug(LOG_EXIT, "resolve", re); throw re; } toReturn.add(modelledResourceForThisMatch); } } } log.debug(LOG_EXIT, "resolve", toReturn); return toReturn; } else { Reason[] reasons = obrResolver.getUnsatisfiedRequirements(); // let's refine the list by removing the indirect unsatisfied bundles that are caused by unsatisfied packages or other bundles Map<String,Set<String>> refinedReqs = refineUnsatisfiedRequirements(obrResolver, reasons); StringBuffer reqList = new StringBuffer(); Map<String, String> unsatisfiedRequirements = extractConsumableMessageInfo(refinedReqs); for (String reason : unsatisfiedRequirements.values()) { reqList.append('\n'); reqList.append(reason); } ResolverException re = new ResolverException(MessageUtil.getMessage("RESOLVER_UNABLE_TO_RESOLVE", new Object[] { appName, reqList })); re.setUnsatisfiedRequirementsAndReasons(unsatisfiedRequirements); log.debug(LOG_EXIT, "resolve", re); throw re; } } private Resolver getConfiguredObrResolver(String appName, String appVersion, Collection<ModelledResource> byValueBundles) throws ResolverException { return getConfiguredObrResolver(appName, appVersion, byValueBundles, platformRepository); } private Resolver getConfiguredObrResolver(String appName, String appVersion, Collection<ModelledResource> byValueBundles, PlatformRepository platformRepository) throws ResolverException { log.debug(LOG_ENTRY, "getConfiguredObrResolver", new Object[]{appName, appVersion,byValueBundles }); DataModelHelper helper = repositoryAdmin.getHelper(); Repository appRepo; try { ByteArrayOutputStream bytesOut = new ByteArrayOutputStream(); RepositoryGeneratorImpl.generateRepository(repositoryAdmin, 
appName + "_" + appVersion, byValueBundles, bytesOut); appRepo = helper.readRepository(new InputStreamReader(new ByteArrayInputStream(bytesOut.toByteArray()))); } catch (Exception e) { throw new ResolverException(e); } List<Repository> resolveRepos = new ArrayList<Repository>(); // add system repository resolveRepos.add(repositoryAdmin.getSystemRepository()); // add local repository if configured if (!(excludeLocalRuntime())) { resolveRepos.add(getLocalRepository(repositoryAdmin)); } // add application repository resolveRepos.add(appRepo); // Need to refresh the repositories added to repository admin // add user-defined repositories Repository[] repos = repositoryAdmin.listRepositories(); for (Repository r : repos) { resolveRepos.add(r); } Resolver obrResolver = repositoryAdmin.resolver(resolveRepos.toArray(new Repository[resolveRepos.size()])); addPlatformRepositories (obrResolver, appName, platformRepository); log.debug(LOG_EXIT, "getConfiguredObrResolver", obrResolver); return obrResolver; } @Deprecated @Override public Set<BundleInfo> resolve(AriesApplication app, ResolveConstraint... constraints) throws ResolverException { log.trace("resolving {}", app); ApplicationMetadata appMeta = app.getApplicationMetadata(); String appName = appMeta.getApplicationSymbolicName(); Version appVersion = appMeta.getApplicationVersion(); List<Content> appContent = appMeta.getApplicationContents(); Collection<Content> useBundleContent = appMeta.getUseBundles(); List<Content> contents = new ArrayList<Content>(); contents.addAll(appContent); contents.addAll(useBundleContent); if ((constraints != null ) && (constraints.length > 0 )) { for (ResolveConstraint con: constraints) { contents.add(ManifestHeaderProcessor.parseContent(con.getBundleName(), con.getVersionRange().toString())); } } Resolver obrResolver = getConfiguredObrResolver(appName, appVersion.toString(), toModelledResource(app.getBundleInfo())); // add a resource describing the requirements of the application metadata. 
obrResolver.add(createApplicationResource( appName, appVersion, contents)); if (obrResolver.resolve()) { Set<BundleInfo> result = new HashSet<BundleInfo>(); List<Resource> requiredResources = retrieveRequiredResources(obrResolver); for (Resource resource: requiredResources) { BundleInfo bundleInfo = toBundleInfo(resource, false); result.add(bundleInfo); } if (returnOptionalResources) { for (Resource resource: obrResolver.getOptionalResources()) { BundleInfo bundleInfo = toBundleInfo(resource, true); result.add(bundleInfo); } } return result; } else { Reason[] reasons = obrResolver.getUnsatisfiedRequirements(); //refine the list by removing the indirect unsatisfied bundles that are caused by unsatisfied packages or other bundles Map<String,Set<String>> refinedReqs = refineUnsatisfiedRequirements(obrResolver, reasons); StringBuffer reqList = new StringBuffer(); Map<String, String> unsatisfiedRequirements = extractConsumableMessageInfo(refinedReqs); for (String reason : unsatisfiedRequirements.values()) { reqList.append('\n'); reqList.append(reason); } ResolverException re = new ResolverException(MessageUtil.getMessage("RESOLVER_UNABLE_TO_RESOLVE", new Object[] { app.getApplicationMetadata().getApplicationName(), reqList })); re.setUnsatisfiedRequirementsAndReasons(unsatisfiedRequirements); log.debug(LOG_EXIT, "resolve", re); throw re; } } @Override public BundleInfo getBundleInfo(String bundleSymbolicName, Version bundleVersion) { Map<String, String> attribs = new HashMap<String, String>(); attribs.put(Resource.VERSION, bundleVersion.toString()); String filterString = ManifestHeaderProcessor.generateFilter(Resource.SYMBOLIC_NAME, bundleSymbolicName, attribs); Resource[] resources; try { resources = repositoryAdmin.discoverResources(filterString); if (resources != null && resources.length > 0) { return toBundleInfo(resources[0], false); } else { return null; } } catch (InvalidSyntaxException e) { log.error("Invalid filter", e); return null; } } /* A 'platform 
repository' describes capabilities of the target runtime environment * These should be added to the resolver without being listed as coming from a particular * repository or bundle. */ private void addPlatformRepositories (Resolver obrResolver, String appName, PlatformRepository platformRepository) { log.debug(LOG_ENTRY, "addPlatformRepositories", new Object[]{obrResolver, appName}); DataModelHelper helper = repositoryAdmin.getHelper(); if (platformRepository != null) { Collection<URI> uris = platformRepository.getPlatformRepositoryURLs(); if ((uris != null) && (!uris.isEmpty())) { for (URI uri : uris) { InputStream is = null; try { is = uri.toURL().openStream(); Reader repoReader = new InputStreamReader(is); Repository aPlatformRepo = helper.readRepository(repoReader); Resource resources[] = aPlatformRepo.getResources(); for (Resource r : resources) { Capability[] caps = r.getCapabilities(); for (Capability c : caps) { obrResolver.addGlobalCapability(c); } } } catch (Exception e) { // not a big problem log.error(MessageUtil.getMessage("RESOLVER_UNABLE_TO_READ_REPOSITORY_EXCEPTION", new Object[]{appName, uri}) ); } finally { IOUtils.close(is); } } } } log.debug(LOG_EXIT, "addPlatformRepositories"); } private Resource createApplicationResource( String appName, Version appVersion, List<Content> appContent) { return new ApplicationResourceImpl(appName, appVersion, appContent); } private Resource createApplicationResource( String appName, String appVersion, Collection<ImportedBundle> inputs) { return new ApplicationResourceImpl(appName, Version.parseVersion(appVersion), inputs); } private BundleInfo toBundleInfo(Resource resource, boolean optional) { Map<String, String> directives = null; if (optional) { directives = new HashMap<String, String>(); directives.put(Constants.RESOLUTION_DIRECTIVE, Constants.RESOLUTION_OPTIONAL); } return new OBRBundleInfo(resource.getSymbolicName(), resource.getVersion(), resource.getURI(), null, null, null, null, null, null, directives, 
null); } /** * Get the list of resources returned by the resolver * @param resolver OBR resolver * @return a list of required resources */ protected List<Resource> retrieveRequiredResources(Resolver resolver) { log.debug(LOG_ENTRY,"retrieveRequiredResources", resolver); Map<String, List<Resource>> resourcesByName = new HashMap<String, List<Resource>>(); for (Resource r : resolver.getRequiredResources()) { resourcesByName.put(r.getSymbolicName(), mergeResource(resolver, r, resourcesByName.get(r .getSymbolicName()))); } List<Resource> result = new ArrayList<Resource>(); for (List<Resource> res : resourcesByName.values()) { result.addAll(res); } log.debug(LOG_EXIT, "retrieveRequiredResources", result); return result; } /** * Get rid of the redundant resources * @param resolver OBR resolver * @param r a resource * @param list similar resources * @return the list of minimum resources */ protected List<Resource> mergeResource(Resolver resolver, Resource r, List<Resource> list) { log.debug(LOG_ENTRY, "mergeResource", new Object[]{resolver, r, list}); if (list == null) { log.debug(LOG_EXIT, "mergeResource", Arrays.asList(r)); return Arrays.asList(r); } else { List<Resource> result = new ArrayList<Resource>(); for (Resource old : list) { boolean oldRedundant = satisfiesAll(r, resolver.getReason(old)); boolean newRedundant = satisfiesAll(old, resolver.getReason(r)); if (oldRedundant && newRedundant) { int comp = old.getVersion().compareTo(r.getVersion()); oldRedundant = comp < 0; newRedundant = comp >= 0; } if (newRedundant) { log.debug(LOG_EXIT, "mergeResource", list); return list; } else if (oldRedundant) { // do nothing -> so don't add the old resource to the new list } else { result.add(old); } } result.add(r); log.debug(LOG_EXIT, "mergeResource", result); return result; } } protected boolean satisfiesAll(Resource res, Reason[] reasons) { log.debug(LOG_ENTRY,"satisfiesAll", new Object[] {res, Arrays.toString(reasons)}); //Let's convert the reason to requirement 
List<Requirement> reqs = new ArrayList<Requirement>(); for (Reason reason : reasons) { reqs.add(reason.getRequirement()); } boolean result = true; outer: for (Requirement r : reqs) { boolean found = false; inner: for (Capability c : res.getCapabilities()) { if (r.isSatisfied(c)) { found = true; break inner; } } if (!!!found && !!!r.isOptional()) { result = false; break outer; } } log.debug(LOG_EXIT, "satisfiesAll", result); return result; } private static final Set<String> SPECIAL_FILTER_ATTRS = Collections .unmodifiableSet(new HashSet<String>(Arrays.asList(ModellingConstants.OBR_PACKAGE, ModellingConstants.OBR_SYMBOLIC_NAME, ModellingConstants.OBR_SERVICE, Constants.VERSION_ATTRIBUTE))); /** * Turn a requirement into a human readable String for debug. * @param filter The filter that is failing * @param bundlesFailing For problems with a bundle, the set of bundles that have a problem * @return human readable form */ private Map<String, String> extractConsumableMessageInfo( Map<String, Set<String>> refinedReqs) { log.debug(LOG_ENTRY, "extractConsumableMessageInfo", refinedReqs); Map<String, String> unsatisfiedRequirements = new HashMap<String, String>(); for (Map.Entry<String, Set<String>> filterEntry : refinedReqs.entrySet()) { String filter = filterEntry.getKey(); Set<String> bundlesFailing = filterEntry.getValue(); log.debug("unable to satisfy the filter , filter = " + filter + "required by " + Arrays.toString(bundlesFailing.toArray())); Map<String, String> attrs = ManifestHeaderProcessor.parseFilter(filter); Map<String, String> customAttrs = new HashMap<String, String>(); for (Map.Entry<String, String> e : attrs.entrySet()) { if (!SPECIAL_FILTER_ATTRS.contains(e.getKey())) { customAttrs.put(e.getKey(), e.getValue()); } } StringBuilder msgKey = new StringBuilder(); List<Object> inserts = new ArrayList<Object>(); final String type; boolean unknownType = false; if (attrs.containsKey(ModellingConstants.OBR_PACKAGE)) { type = ModellingConstants.OBR_PACKAGE; 
msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_PACKAGE"); inserts.add(attrs.get(ModellingConstants.OBR_PACKAGE)); } else if (attrs.containsKey(ModellingConstants.OBR_SYMBOLIC_NAME)) { type = ModellingConstants.OBR_SYMBOLIC_NAME; msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_BUNDLE"); inserts.add(attrs.get(ModellingConstants.OBR_SYMBOLIC_NAME)); } else if (attrs.containsKey(ModellingConstants.OBR_SERVICE)) { type = ModellingConstants.OBR_SERVICE; msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_SERVICE"); // No insert for service name as the name must be "*" to match any // Service capability } else { type = ModellingConstants.OBR_UNKNOWN; unknownType = true; msgKey.append("RESOLVER_UNABLE_TO_RESOLVE_FILTER"); inserts.add(filter); } if (bundlesFailing != null && bundlesFailing.size() != 0) { msgKey.append("_REQUIRED_BY_BUNDLE"); if (bundlesFailing.size() == 1) inserts.add(bundlesFailing.iterator().next()); // Just take the string // if there's only one // of them else inserts.add(bundlesFailing.toString()); // Add the whole set if there // isn't exactly one } if (!unknownType && !customAttrs.isEmpty()) { msgKey.append("_WITH_ATTRS"); inserts.add(customAttrs); } if (!unknownType && attrs.containsKey(Constants.VERSION_ATTRIBUTE)) { msgKey.append("_WITH_VERSION"); VersionRange vr = ManifestHeaderProcessor.parseVersionRange(attrs .get(Constants.VERSION_ATTRIBUTE)); inserts.add(vr.getMinimumVersion()); if (!!!vr.isExactVersion()) { msgKey.append(vr.isMinimumExclusive() ? "_LOWEX" : "_LOW"); if (vr.getMaximumVersion() != null) { msgKey.append(vr.isMaximumExclusive() ? 
"_UPEX" : "_UP"); inserts.add(vr.getMaximumVersion()); } } } String msgKeyStr = msgKey.toString(); String msg = MessageUtil.getMessage(msgKeyStr, inserts.toArray()); unsatisfiedRequirements.put(msg, type); } log.debug(LOG_EXIT, "extractConsumableMessageInfo", unsatisfiedRequirements); return unsatisfiedRequirements; } /** * Refine the unsatisfied requirements ready for later human comsumption * * @param resolver The resolver to be used to refine the requirements * @param reasons The reasons * @return A map of the unsatifiedRequirement to the set of bundles that have that requirement unsatisfied (values associated with the keys can be null) */ private Map<String,Set<String>> refineUnsatisfiedRequirements(Resolver resolver, Reason[] reasons) { log.debug(LOG_ENTRY, "refineUnsatisfiedRequirements", new Object[]{resolver, Arrays.toString(reasons)}); Map<Requirement,Set<String>> req_resources = new HashMap<Requirement,Set<String>>(); // add the reasons to the map, use the requirement as the key, the resources required the requirement as the values Set<Resource> resources = new HashSet<Resource>(); for (Reason reason: reasons) { resources.add(reason.getResource()); Requirement key = reason.getRequirement(); String value = reason.getResource().getSymbolicName()+"_" + reason.getResource().getVersion().toString(); Set<String> values = req_resources.get(key); if (values == null) { values = new HashSet<String>(); } values.add(value); req_resources.put(key, values); } // remove the requirements that can be satisifed by the resources. It is listed because the resources are not satisfied by other requirements. // For an instance, the unsatisfied reasons are [package a, required by bundle aa], [package b, required by bundle bb] and [package c, required by bundle cc], // If the bundle aa exports the package a and c. In our error message, we only want to display package a is needed by bundle aa. 
// Go through each requirement and find out whether the requirement can be satisfied by the reasons. Set<Capability> caps = new HashSet<Capability>(); for (Resource res : resources) { if ((res !=null) && (res.getCapabilities() != null)) { List<Capability> capList = Arrays.asList(res.getCapabilities()); if (capList != null) { caps.addAll(capList); } } } Iterator<Map.Entry<Requirement, Set<String>>> iterator = req_resources.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<Requirement, Set<String>> entry = iterator.next(); Requirement req = entry.getKey(); for (Capability cap :caps) { if (req.isSatisfied(cap)){ // remove the key from the map iterator.remove(); break; } } } //Now the map only contains the necessary missing requirements Map<String,Set<String>> result = new HashMap<String, Set<String>>(); for (Map.Entry<Requirement, Set<String>> req_res : req_resources.entrySet()) { result.put(req_res.getKey().getFilter(), req_res.getValue()); } log.debug(LOG_EXIT, "refineUnsatisfiedRequirements", new Object[]{result}); return result; } private Collection<ImportedBundle> toImportedBundle(Collection<Content> content) throws ResolverException { log.debug(LOG_ENTRY, "toImportedBundle", content); List<ImportedBundle> result = new ArrayList<ImportedBundle>(); for (Content c : content) { try { result.add(modellingManager.getImportedBundle(c.getContentName(), c.getVersion().toString())); } catch (InvalidAttributeException iae) { throw new ResolverException(iae); } } log.debug(LOG_EXIT, "toImportedBundle", result); return result; } private Collection<ModelledResource> toModelledResource(Collection<BundleInfo> bundleInfos) throws ResolverException{ Collection<ModelledResource> result = new ArrayList<ModelledResource>(); if ((bundleInfos != null) && (!!!bundleInfos.isEmpty())) { for (BundleInfo bi : bundleInfos) { try { result.add(modellingManager.getModelledResource(null, bi, null, null)); } catch (InvalidAttributeException iae) { throw new ResolverException(iae); } 
} } return result; } private Repository getLocalRepository(RepositoryAdmin admin) { Repository localRepository = repositoryAdmin.getLocalRepository(); Resource[] resources = localRepository.getResources(); Resource[] newResources = new Resource[resources.length]; for (int i = 0; i < resources.length; i++) { newResources[i] = new ResourceWrapper(resources[i]); } return repositoryAdmin.getHelper().repository(newResources); } private boolean excludeLocalRuntime() { return Boolean.parseBoolean(System.getProperty(AppConstants.PROVISON_EXCLUDE_LOCAL_REPO_SYSPROP)); } }
ARIES-557: Correcting swapped parameters in message. git-svn-id: f3027bd689517dd712b868b0d3f5f59c3162b83d@1063708 13f79535-47bb-0310-9956-ffa450edef68
application/application-obr-resolver/src/main/java/org/apache/aries/application/resolver/obr/OBRAriesResolver.java
ARIES-557: Correcting swapped parameters in message.
Java
apache-2.0
4670d75c4e17340951e3fbdbf0199d7ae7ad8f42
0
smallmiro/nbasearc-spring-boot-starter
/* * Copyright 2010-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.redis.cluster.spring; import com.navercorp.redis.cluster.RedisClusterPoolConfig; import com.navercorp.redis.cluster.gateway.GatewayConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; /** * {@link EnableAutoConfiguration Auto-Configuration} for nBase-Arc RedisTemplate * {@link GatewayConfig}, {@link RedisClusterConnectionFactory} and {@link StringRedisClusterTemplate} * * @author Junhwan Oh */ @Configuration @ConditionalOnProperty({"nbase.arc.gateway.zkAddress", "nbase.arc.gateway.clusterName"}) @ConditionalOnClass({GatewayConfig.class, RedisClusterConnectionFactory.class, StringRedisClusterTemplate.class, RedisClusterPoolConfig.class}) @EnableConfigurationProperties(NBaseArcSpringbootProperties.class) public class NBaseArcAutoConfiguration { private Logger logger = 
LoggerFactory.getLogger(NBaseArcAutoConfiguration.class); @Autowired private NBaseArcSpringbootProperties properties; @Bean @ConditionalOnMissingBean public RedisClusterPoolConfig poolConfig() { RedisClusterPoolConfig poolConfig = new RedisClusterPoolConfig(); poolConfig.setLifo(properties.getPool().isLifo()); poolConfig.setFairness(properties.getPool().isFairness()); poolConfig.setInitialSize(properties.getPool().getInitialSize()); poolConfig.setMaxWaitMillis(properties.getPool().getMaxWaitMillis()); poolConfig.setMinEvictableIdleTimeMillis(properties.getPool().getMinEvictableIdleTimeMillis()); poolConfig.setSoftMinEvictableIdleTimeMillis(properties.getPool().getSoftMinEvictableIdleTimeMillis()); poolConfig.setNumTestsPerEvictionRun(properties.getPool().getNumTestsPerEvictionRun()); poolConfig.setEvictionPolicyClassName(properties.getPool().getEvictionPolicyClassName()); poolConfig.setTestOnCreate(properties.getPool().isTestOnCreate()); poolConfig.setTestOnBorrow(properties.getPool().isTestOnBorrow()); poolConfig.setTestOnReturn(properties.getPool().isTestOnReturn()); poolConfig.setTestWhileIdle(properties.getPool().isTestWhileIdle()); poolConfig.setTimeBetweenEvictionRunsMillis(properties.getPool().getTimeBetweenEvictionRunsMillis()); poolConfig.setBlockWhenExhausted(properties.getPool().isBlockWhenExhausted()); poolConfig.setJmxEnabled(properties.getPool().isJmxEnabled()); poolConfig.setJmxNamePrefix(properties.getPool().getJmxNamePrefix()); poolConfig.setJmxNameBase(properties.getPool().getJmxNameBase()); logger.debug(poolConfig.toString()); return poolConfig; } @Bean @ConditionalOnMissingBean public GatewayConfig gatewayConfig(RedisClusterPoolConfig poolConfig) { GatewayConfig gatewayConfig = new GatewayConfig(); gatewayConfig.setPoolConfig(poolConfig); gatewayConfig.setIpAddress(properties.getGateway().getIpAddress()); gatewayConfig.setDomainAddress(properties.getGateway().getDomainAddress()); 
gatewayConfig.setTimeoutMillisec(properties.getGateway().getTimeoutMillisec()); gatewayConfig.setHealthCheckPeriodSeconds(properties.getGateway().getHealthCheckPeriodSeconds()); gatewayConfig.setHealthCheckThreadSize(properties.getGateway().getHealthCheckThreadSize()); gatewayConfig.setHealthCheckUsed(properties.getGateway().isHealthCheckUsed()); gatewayConfig.setGatewaySelectorMethod(properties.getGateway().getGatewaySelectorMethod()); gatewayConfig.setZkAddress(properties.getGateway().getZkAddress()); gatewayConfig.setClusterName(properties.getGateway().getClusterName()); gatewayConfig.setMaxRetry(properties.getGateway().getMaxRetry()); gatewayConfig.setBackgroundPoolSize(properties.getGateway().getBackgroundPoolSize()); gatewayConfig.setClientSyncTimeUnitMillis(properties.getGateway().getClientSyncTimeUnitMillis()); gatewayConfig.setConnectPerDelayMillis(properties.getGateway().getConnectPerDelayMillis()); gatewayConfig.setAffinityUsed(properties.getGateway().isAffinityUsed()); gatewayConfig.setZkSessionTimeout(properties.getGateway().getZkSessionTimeout()); gatewayConfig.setZkConnectTimeout(properties.getGateway().getZkConnectTimeout()); logger.debug(gatewayConfig.toString()); return gatewayConfig; } @Bean(destroyMethod = "destroy") @ConditionalOnMissingBean public RedisClusterConnectionFactory redisClusterConnectionFactory(GatewayConfig gatewayConfig) { logger.debug("Initialled redisClusterConnectionFactory zkAddress=" + gatewayConfig.getZkAddress() + ";clusterName=" + gatewayConfig.getClusterName()); RedisClusterConnectionFactory redisClusterConnectionFactory = new RedisClusterConnectionFactory(); redisClusterConnectionFactory.setConfig(gatewayConfig); return redisClusterConnectionFactory; } @Bean @ConditionalOnMissingBean public StringRedisClusterTemplate redisTemplate(RedisClusterConnectionFactory redisClusterConnectionFactory) { logger.debug("Init StringRedisClusterTemplate"); StringRedisClusterTemplate redisClusterTemplate = new 
StringRedisClusterTemplate(); redisClusterTemplate.setConnectionFactory(redisClusterConnectionFactory); return redisClusterTemplate; } }
nbasearc-spring-boot-autoconfigure/src/main/java/com/navercorp/redis/cluster/spring/NBaseArcAutoConfiguration.java
/* * Copyright 2010-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.redis.cluster.spring; import com.navercorp.redis.cluster.RedisClusterPoolConfig; import com.navercorp.redis.cluster.gateway.GatewayConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; /** * {@link EnableAutoConfiguration Auto-Configuration} for nBase-Arc RedisTemplate * {@link GatewayConfig}, {@link RedisClusterConnectionFactory} and {@link StringRedisClusterTemplate} * * @author Junhwan Oh */ @Configuration @ConditionalOnProperty("nbase.arc.gateway.zkAddress") @ConditionalOnClass({GatewayConfig.class, RedisClusterConnectionFactory.class, StringRedisClusterTemplate.class, RedisClusterPoolConfig.class}) @EnableConfigurationProperties(NBaseArcSpringbootProperties.class) public class NBaseArcAutoConfiguration { private Logger logger = 
LoggerFactory.getLogger(NBaseArcAutoConfiguration.class); @Autowired private NBaseArcSpringbootProperties properties; @Bean @ConditionalOnMissingBean public RedisClusterPoolConfig poolConfig() { RedisClusterPoolConfig poolConfig = new RedisClusterPoolConfig(); poolConfig.setLifo(properties.getPool().isLifo()); poolConfig.setFairness(properties.getPool().isFairness()); poolConfig.setInitialSize(properties.getPool().getInitialSize()); poolConfig.setMaxWaitMillis(properties.getPool().getMaxWaitMillis()); poolConfig.setMinEvictableIdleTimeMillis(properties.getPool().getMinEvictableIdleTimeMillis()); poolConfig.setSoftMinEvictableIdleTimeMillis(properties.getPool().getSoftMinEvictableIdleTimeMillis()); poolConfig.setNumTestsPerEvictionRun(properties.getPool().getNumTestsPerEvictionRun()); poolConfig.setEvictionPolicyClassName(properties.getPool().getEvictionPolicyClassName()); poolConfig.setTestOnCreate(properties.getPool().isTestOnCreate()); poolConfig.setTestOnBorrow(properties.getPool().isTestOnBorrow()); poolConfig.setTestOnReturn(properties.getPool().isTestOnReturn()); poolConfig.setTestWhileIdle(properties.getPool().isTestWhileIdle()); poolConfig.setTimeBetweenEvictionRunsMillis(properties.getPool().getTimeBetweenEvictionRunsMillis()); poolConfig.setBlockWhenExhausted(properties.getPool().isBlockWhenExhausted()); poolConfig.setJmxEnabled(properties.getPool().isJmxEnabled()); poolConfig.setJmxNamePrefix(properties.getPool().getJmxNamePrefix()); poolConfig.setJmxNameBase(properties.getPool().getJmxNameBase()); logger.debug(poolConfig.toString()); return poolConfig; } @Bean @ConditionalOnMissingBean public GatewayConfig gatewayConfig(RedisClusterPoolConfig poolConfig) { GatewayConfig gatewayConfig = new GatewayConfig(); gatewayConfig.setPoolConfig(poolConfig); gatewayConfig.setIpAddress(properties.getGateway().getIpAddress()); gatewayConfig.setDomainAddress(properties.getGateway().getDomainAddress()); 
gatewayConfig.setTimeoutMillisec(properties.getGateway().getTimeoutMillisec()); gatewayConfig.setHealthCheckPeriodSeconds(properties.getGateway().getHealthCheckPeriodSeconds()); gatewayConfig.setHealthCheckThreadSize(properties.getGateway().getHealthCheckThreadSize()); gatewayConfig.setHealthCheckUsed(properties.getGateway().isHealthCheckUsed()); gatewayConfig.setGatewaySelectorMethod(properties.getGateway().getGatewaySelectorMethod()); gatewayConfig.setZkAddress(properties.getGateway().getZkAddress()); gatewayConfig.setClusterName(properties.getGateway().getClusterName()); gatewayConfig.setMaxRetry(properties.getGateway().getMaxRetry()); gatewayConfig.setBackgroundPoolSize(properties.getGateway().getBackgroundPoolSize()); gatewayConfig.setClientSyncTimeUnitMillis(properties.getGateway().getClientSyncTimeUnitMillis()); gatewayConfig.setConnectPerDelayMillis(properties.getGateway().getConnectPerDelayMillis()); gatewayConfig.setAffinityUsed(properties.getGateway().isAffinityUsed()); gatewayConfig.setZkSessionTimeout(properties.getGateway().getZkSessionTimeout()); gatewayConfig.setZkConnectTimeout(properties.getGateway().getZkConnectTimeout()); logger.debug(gatewayConfig.toString()); return gatewayConfig; } @Bean(destroyMethod = "destroy") @ConditionalOnMissingBean public RedisClusterConnectionFactory redisClusterConnectionFactory(GatewayConfig gatewayConfig) { logger.debug("Initialled redisClusterConnectionFactory zkAddress=" + gatewayConfig.getZkAddress() + ";clusterName=" + gatewayConfig.getClusterName()); RedisClusterConnectionFactory redisClusterConnectionFactory = new RedisClusterConnectionFactory(); redisClusterConnectionFactory.setConfig(gatewayConfig); return redisClusterConnectionFactory; } @Bean @ConditionalOnMissingBean public StringRedisClusterTemplate redisTemplate(RedisClusterConnectionFactory redisClusterConnectionFactory) { logger.debug("Init StringRedisClusterTemplate"); StringRedisClusterTemplate redisClusterTemplate = new 
StringRedisClusterTemplate(); redisClusterTemplate.setConnectionFactory(redisClusterConnectionFactory); return redisClusterTemplate; } }
Modify conditional
nbasearc-spring-boot-autoconfigure/src/main/java/com/navercorp/redis/cluster/spring/NBaseArcAutoConfiguration.java
Modify conditional
Java
apache-2.0
a1ec456bc4375108c60fc38130cda042ead8e453
0
auricgoldfinger/cgeo,superspindel/cgeo,superspindel/cgeo,auricgoldfinger/cgeo,cgeo/cgeo,matej116/cgeo,rsudev/c-geo-opensource,matej116/cgeo,S-Bartfast/cgeo,rsudev/c-geo-opensource,Bananeweizen/cgeo,cgeo/cgeo,mucek4/cgeo,mucek4/cgeo,S-Bartfast/cgeo,mucek4/cgeo,auricgoldfinger/cgeo,tobiasge/cgeo,pstorch/cgeo,pstorch/cgeo,kumy/cgeo,cgeo/cgeo,S-Bartfast/cgeo,cgeo/cgeo,tobiasge/cgeo,Bananeweizen/cgeo,superspindel/cgeo,kumy/cgeo,kumy/cgeo,matej116/cgeo,pstorch/cgeo,tobiasge/cgeo,Bananeweizen/cgeo,rsudev/c-geo-opensource
package cgeo.geocaching.utils; import cgeo.geocaching.CgeoApplication; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.support.annotation.StringRes; import io.reactivex.disposables.CompositeDisposable; import io.reactivex.disposables.Disposable; /** * Handler with a dispose policy. Once disposed, the handler will not handle * any more dispose or regular message. */ public abstract class DisposableHandler extends Handler implements Disposable { public static final int DONE = -1000; protected static final int UPDATE_LOAD_PROGRESS_DETAIL = 42186; private final CompositeDisposable disposables = new CompositeDisposable(); public DisposableHandler(final Looper serviceLooper) { super(serviceLooper); } public DisposableHandler() { super(); } private static class CancelHolder { // CANCEL is used to synchronously dispose the DisposableHandler and call // the appropriate callback. static final int CANCEL = -1; // When dispose() has been called, CANCEL_CALLBACK is used to synchronously // call the appropriate callback. static final int CANCEL_CALLBACK = -2; final int kind; CancelHolder(final int kind) { this.kind = kind; } } @Override public final void handleMessage(final Message message) { if (message.obj instanceof CancelHolder) { final CancelHolder holder = (CancelHolder) message.obj; if (holder.kind == CancelHolder.CANCEL && !isDisposed()) { disposables.dispose(); handleDispose(); } else if (holder.kind == CancelHolder.CANCEL_CALLBACK) { // We have been disposed already but the callback has not been called yet. handleDispose(); } } else if (!isDisposed()) { handleRegularMessage(message); } } /** * Add a disposable to the list of disposables to be disposed at disposition time. */ public final void add(final Disposable disposable) { disposables.add(disposable); } /** * Handle a non-dispose message.<br> * Subclasses must implement this to handle messages. 
* * @param message * the message to handle */ protected abstract void handleRegularMessage(final Message message); /** * Handle a dispose message. * * This is called on the handler looper thread when the handler gets disposed. */ protected void handleDispose() { // May be overwritten by inheriting classes. } /** * Get a dispose message that can later be sent to this handler to dispose it. * * @return a message that, when sent, will dispose the current handler. */ public Message disposeMessage() { return obtainMessage(0, new CancelHolder(CancelHolder.CANCEL)); } /** * Cancel the current handler. This can be called from any thread. The disposables * added with {@link #add(Disposable)} will be disposed immediately, while the * {@link #handleDispose()} callback will be called synchronously by the handler. */ public void dispose() { disposables.dispose(); obtainMessage(0, new CancelHolder(CancelHolder.CANCEL_CALLBACK)).sendToTarget(); } /** * Check if the current handler has been disposed. * * @return true if the handler has been disposed */ public boolean isDisposed() { return disposables.isDisposed(); } /** * Check if a handler has been disposed. * * @param handler * a handler, or null * @return true if the handler is not null and has been disposed */ public static boolean isDisposed(final DisposableHandler handler) { return handler != null && handler.isDisposed(); } public static void sendLoadProgressDetail(final Handler handler, @StringRes final int resourceId) { if (handler != null) { handler.obtainMessage(UPDATE_LOAD_PROGRESS_DETAIL, CgeoApplication.getInstance().getString(resourceId)).sendToTarget(); } } }
main/src/cgeo/geocaching/utils/DisposableHandler.java
package cgeo.geocaching.utils; import cgeo.geocaching.CgeoApplication; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.support.annotation.StringRes; import io.reactivex.disposables.CompositeDisposable; import io.reactivex.disposables.Disposable; /** * Handler with a dispose policy. Once disposed, the handler will not handle * any more dispose or regular message. */ public abstract class DisposableHandler extends Handler implements Disposable { public static final int DONE = -1000; protected static final int UPDATE_LOAD_PROGRESS_DETAIL = 42186; private final CompositeDisposable disposables = new CompositeDisposable(); public DisposableHandler(final Looper serviceLooper) { super(serviceLooper); } public DisposableHandler() { super(); } private static class CancelHolder { static final int CANCEL = -1; static final int CANCEL_CALLBACK = -2; final int kind; CancelHolder(final int kind) { this.kind = kind; } } @Override public final void handleMessage(final Message message) { if (message.obj instanceof CancelHolder) { final CancelHolder holder = (CancelHolder) message.obj; if (holder.kind == CancelHolder.CANCEL && !isDisposed()) { disposables.dispose(); handleDispose(); } else if (holder.kind == CancelHolder.CANCEL_CALLBACK) { handleDispose(); } } else if (!isDisposed()) { handleRegularMessage(message); } } /** * Add a disposable to the list of disposables to be disposed at disposition time. */ public final void add(final Disposable disposable) { disposables.add(disposable); } /** * Handle a non-dispose message.<br> * Subclasses must implement this to handle messages. * * @param message * the message to handle */ protected abstract void handleRegularMessage(final Message message); /** * Handle a dispose message. * */ protected void handleDispose() { } /** * Get a dispose message that can later be sent to this handler to dispose it. * * @return a message that, when sent, will dispose the current handler. 
*/ public Message disposeMessage() { return obtainMessage(0, new CancelHolder(CancelHolder.CANCEL)); } /** * Cancel the current handler. This can be called from any thread. The disposables * added with {@link #add(Disposable)} will be disposed immediately, while the * {@link #handleDispose()} callback will be called synchronously by the handler. */ public void dispose() { disposables.dispose(); obtainMessage(0, new CancelHolder(CancelHolder.CANCEL_CALLBACK)).sendToTarget(); } /** * Check if the current handler has been disposed. * * @return true if the handler has been disposed */ public boolean isDisposed() { return disposables.isDisposed(); } /** * Check if a handler has been disposed. * * @param handler * a handler, or null * @return true if the handler is not null and has been disposed */ public static boolean isDisposed(final DisposableHandler handler) { return handler != null && handler.isDisposed(); } public static void sendLoadProgressDetail(final Handler handler, @StringRes final int resourceId) { if (handler != null) { handler.obtainMessage(UPDATE_LOAD_PROGRESS_DETAIL, CgeoApplication.getInstance().getString(resourceId)).sendToTarget(); } } }
Add explanations
main/src/cgeo/geocaching/utils/DisposableHandler.java
Add explanations
Java
artistic-2.0
570a9fe52f703bc6b324a7cb40c3c4c2b7b42ba0
0
jdownloader-mirror/appwork-utils
/** * Copyright (c) 2009 - 2011 AppWork UG(haftungsbeschränkt) <[email protected]> * * This file is part of org.appwork.utils.net.ftpserver * * This software is licensed under the Artistic License 2.0, * see the LICENSE file or http://www.opensource.org/licenses/artistic-license-2.0.php * for details */ package org.appwork.utils.net.ftpserver; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.util.ArrayList; import org.appwork.controlling.State; import org.appwork.controlling.StateConflictException; import org.appwork.controlling.StateMachine; import org.appwork.controlling.StateMachineInterface; import org.appwork.utils.Regex; import org.appwork.utils.logging.Log; /** * @author daniel * */ public class FtpConnection implements Runnable, StateMachineInterface { public static enum COMMAND { /* commands starting with X are experimental, see RFC1123 */ ABOR(true, 0), REST(true, 1), RNTO(true, 1, -1), RNFR(true, 1, -1), DELE(true, 1, -1), XRMD(true, 1, -1), RMD(true, 1, -1), SIZE(true, 1, -1), /* rfc3659 */ STRU(true, 1), MODE(true, 1), ALLO(true, 1, -1), APPE(true, 1, -1), STOR(true, 1, -1), XMKD(true, 1, -1), MKD(true, 1, -1), NLST(true, 1, -1), EPRT(true, 1, 1), /* RFC 2428 */ EPSV(true, 0), /* RFC 2428 */ RETR(true, 1, -1), TYPE(true, 1, 2), LIST(true, 0, 1), XCUP(true, 0), CDUP(true, 0), XCWD(true, 1, -1), CWD(true, 1, -1), XPWD(true, 0), PWD(true, 0), NOOP(false, 0), PASV(true, 0), PASS(false, 1), QUIT(true, 0), SYST(true, 0), PORT(true, 1), USER(false, 1); private int paramSize; private int maxSize; private boolean needLogin; private COMMAND(final boolean needLogin, final int paramSize) { this(needLogin, paramSize, paramSize); } private COMMAND(final boolean needLogin, final int paramSize, final int maxSize) { this.paramSize 
= paramSize; this.needLogin = needLogin; this.maxSize = maxSize; } public boolean match(final int length) { if (length == this.paramSize) { return true; } if (length == this.maxSize) { return true; } if (this.maxSize == -1) { return true; } return false; } public boolean needsLogin() { return this.needLogin; } } private static enum TYPE { ASCII, BINARY; } private static final State IDLE = new State("IDLE"); private static final State USER = new State("USER"); private static final State PASS = new State("USER"); private static final State LOGIN = new State("USER"); private static final State LOGOUT = new State("LOGOUT"); private static final State IDLEEND = new State("IDLEEND"); static { FtpConnection.IDLE.addChildren(FtpConnection.USER); FtpConnection.USER.addChildren(FtpConnection.PASS, FtpConnection.LOGIN, FtpConnection.LOGOUT); FtpConnection.PASS.addChildren(FtpConnection.LOGIN, FtpConnection.LOGOUT); FtpConnection.LOGIN.addChildren(FtpConnection.LOGOUT); FtpConnection.LOGOUT.addChildren(FtpConnection.IDLEEND); } private final FtpServer ftpServer; private final Socket controlSocket; private BufferedReader reader; private BufferedWriter writer; private StateMachine stateMachine = null; private Thread thread = null; private String passiveIP = null; private int passivePort = 0; private TYPE type = TYPE.BINARY; private final FtpConnectionState connectionState; private Socket dataSocket = null; private ServerSocket serverSocket = null; /** * @param ftpServer * @param clientSocket * @throws IOException */ public FtpConnection(final FtpServer ftpServer, final Socket clientSocket) throws IOException { this.stateMachine = new StateMachine(this, FtpConnection.IDLE, FtpConnection.IDLEEND); this.connectionState = ftpServer.getFtpCommandHandler().createNewConnectionState(); this.ftpServer = ftpServer; this.controlSocket = clientSocket; this.controlSocket.setSoTimeout(20 * 1000); try { this.reader = new BufferedReader(new 
InputStreamReader(this.controlSocket.getInputStream())); this.writer = new BufferedWriter(new OutputStreamWriter(this.controlSocket.getOutputStream())); this.thread = new Thread(ftpServer.getThreadGroup(), this) { @Override public void interrupt() { /* also close all connections on interrupt */ FtpConnection.this.closeDataConnection(); try { FtpConnection.this.controlSocket.close(); } catch (final Throwable e) { } super.interrupt(); } }; this.thread.setName("FTPConnectionThread: " + this); this.thread.start(); } catch (final IOException e) { try { this.controlSocket.close(); } catch (final Throwable e2) { } this.closeDataConnection(); throw e; } } private String buildParameter(final String[] commandParts) { if (commandParts == null) { return null; } String param = ""; for (int index = 1; index < commandParts.length; index++) { if (param.length() > 0) { param += " "; } param += commandParts[index]; } return param; } private void closeDataConnection() { try { this.dataSocket.close(); } catch (final Throwable e) { } finally { this.dataSocket = null; } try { this.serverSocket.close(); } catch (final Throwable e) { } finally { this.serverSocket = null; } } public StateMachine getStateMachine() { return this.stateMachine; } /** * @param command * @throws IOException */ private void handleCommand(final String command) throws IOException { try { final String commandParts[] = command.split(" "); COMMAND commandEnum = null; try { commandEnum = COMMAND.valueOf(commandParts[0]); } catch (final IllegalArgumentException e) { commandEnum = null; } try { if (commandEnum != null) { if (commandEnum.needLogin) { /* checks if this command needs valid login */ if (!this.stateMachine.isState(FtpConnection.LOGIN)) { throw new FtpNotLoginException(); } } if (!commandEnum.match(commandParts.length - 1)) { /* checks if the parameter syntax is okay */ throw new FtpCommandSyntaxException(); } /* this checks RNFR,RNTO command sequence */ if (this.connectionState.getRenameFile() != null && 
!commandEnum.equals(COMMAND.RNTO)) { /* when renameFile is set, a RNTO command MUST follow */ this.connectionState.setRenameFile(null); throw new FtpBadSequenceException(); } switch (commandEnum) { case ABOR: this.onABOR(); break; case REST: this.onREST(commandParts); break; case PASV: this.onPASV(); break; case RNTO: this.onRNTO(commandParts); break; case RNFR: this.onRNFR(commandParts); break; case XRMD: case RMD: this.onRMD(commandParts); break; case DELE: this.onDELE(commandParts); break; case SIZE: this.onSIZE(commandParts); break; case STRU: this.onSTRU(commandParts); break; case MODE: this.onMODE(commandParts); break; case ALLO: this.onALLO(); break; case APPE: this.onSTOR(commandParts, true); break; case STOR: this.onSTOR(commandParts, false); break; case XMKD: case MKD: this.onMKD(commandParts); break; case NLST: this.onNLST(commandParts); break; case EPSV: this.onEPSV(commandParts); break; case EPRT: this.onEPRT(commandParts); break; case RETR: this.onRETR(commandParts); break; case LIST: this.onLIST(commandParts); break; case USER: this.onUSER(commandParts); break; case PORT: this.onPORT(commandParts); break; case SYST: this.onSYST(); break; case QUIT: this.onQUIT(); break; case PASS: this.onPASS(commandParts); break; case NOOP: this.onNOOP(); break; case XPWD: case PWD: this.onPWD(); break; case XCWD: case CWD: this.onCWD(commandParts); break; case XCUP: case CDUP: this.onCDUP(); break; case TYPE: this.onTYPE(commandParts); break; } } else { throw new FtpCommandNotImplementedException(); } } catch (final StateConflictException e) { throw new FtpBadSequenceException(); } } catch (final FtpException e) { this.write(e.getCode(), e.getMessage()); } catch (final Throwable e) { this.write(550, e.getMessage()); } } /** * @throws IOException * */ private void onABOR() throws IOException { this.write(226, "Command okay"); } private void onALLO() throws IOException { this.write(200, "Command okay"); } private void onCDUP() throws IOException, FtpException { 
this.ftpServer.getFtpCommandHandler().onDirectoryUp(this.connectionState); this.write(200, "Command okay."); } private void onCWD(final String params[]) throws IOException, FtpException { this.ftpServer.getFtpCommandHandler().setCurrentDirectory(this.connectionState, this.buildParameter(params)); // this.write(250, "\"" + this.connectionState.getCurrentDir() + // "\" is cwd."); this.write(250, "Directory successfully changed."); } /** * @param commandParts * @throws FtpException * @throws FtpFileNotExistException * @throws IOException */ private void onDELE(final String[] commandParts) throws FtpFileNotExistException, FtpException, IOException { this.ftpServer.getFtpCommandHandler().removeFile(this.connectionState, this.buildParameter(commandParts)); this.write(250, "\"" + this.buildParameter(commandParts) + "\" removed."); } /** * @param commandParts * @throws IOException */ /** * RFC2428 * * @throws FtpException * * @throws FtpNotLoginException **/ private void onEPRT(final String[] commandParts) throws IOException, FtpException { final String parts[] = commandParts[1].split("\\|"); this.closeDataConnection(); if (parts.length != 4) { throw new FtpCommandSyntaxException(); } if (!"1".equals(parts[1])) { /* 2 equals IPV6 */ throw new FtpException(522, "Network protocol not supported, use (1)"); } this.passiveIP = parts[2]; this.passivePort = Integer.parseInt(parts[3]); this.write(200, "PORT command successful"); } /** * @param commandParts * @throws FtpException */ private void onEPSV(final String[] commandParts) throws FtpException { boolean okay = false; this.closeDataConnection(); try { this.serverSocket = new ServerSocket(); SocketAddress socketAddress = null; if (this.ftpServer.isLocalhostOnly()) { /* bind socket to localhost */ socketAddress = new InetSocketAddress(this.ftpServer.getLocalHost(), 0); } this.serverSocket.bind(socketAddress); okay = true; final int port = this.serverSocket.getLocalPort(); this.write(229, "Entering Extended Passive Mode (|||" + 
port + "|)"); return; } catch (final IOException e) { throw new FtpException(421, "could not open port"); } finally { if (!okay) { this.closeDataConnection(); } } } private void onLIST(final String params[]) throws IOException, FtpException { try { try { this.openDataConnection(); } catch (final IOException e) { throw new FtpException(425, "Can't open data connection"); } this.write(150, "Opening XY mode data connection for file list"); try { final ArrayList<? extends FtpFile> list = this.ftpServer.getFtpCommandHandler().getFileList(this.connectionState, this.buildParameter(params)); this.dataSocket.getOutputStream().write(this.ftpServer.getFtpCommandHandler().formatFileList(list).getBytes("UTF-8")); this.dataSocket.getOutputStream().flush(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final Exception e) { throw new FtpException(451, "Requested action aborted: local error in processing"); } /* we close the passive port after command */ this.write(226, "Transfer complete."); } finally { this.closeDataConnection(); } } /** * @param commandParts * @throws IOException * @throws FtpFileNotExistException */ private void onMKD(final String[] commandParts) throws IOException, FtpException { this.ftpServer.getFtpCommandHandler().makeDirectory(this.connectionState, this.buildParameter(commandParts)); this.write(257, "\"" + this.buildParameter(commandParts) + "\" created."); } private void onMODE(final String[] commandParts) throws IOException, FtpCommandParameterException { if ("S".equalsIgnoreCase(commandParts[1])) { this.write(200, "Command okay."); } else { throw new FtpCommandParameterException(); } } /** * @param commandParts * @throws IOException * @throws FtpException */ private void onNLST(final String[] commandParts) throws IOException, FtpException { try { try { this.openDataConnection(); } catch 
(final IOException e) { throw new FtpException(425, "Can't open data connection"); } this.write(150, "Opening XY mode data connection for file list"); try { final ArrayList<? extends FtpFile> list = this.ftpServer.getFtpCommandHandler().getFileList(this.connectionState, this.buildParameter(commandParts)); final StringBuilder sb = new StringBuilder(); for (final FtpFile file : list) { sb.append(file.getName()); sb.append("\r\n"); } this.dataSocket.getOutputStream().write(sb.toString().getBytes("UTF-8")); this.dataSocket.getOutputStream().flush(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final Exception e) { throw new FtpException(451, "Requested action aborted: local error in processing"); } /* we close the passive port after command */ this.write(226, "Transfer complete."); } finally { this.closeDataConnection(); } } private void onNOOP() throws IOException { this.write(200, "Command okay"); } private void onPASS(final String params[]) throws IOException, FtpException { this.stateMachine.setStatus(FtpConnection.PASS); if (this.connectionState.getUser() == null) { throw new FtpBadSequenceException(); } else { if (this.connectionState.getUser().getPassword() != null) { if (this.connectionState.getUser().getPassword().equals(params[1])) { final String message = this.ftpServer.getFtpCommandHandler().onLoginSuccessRequest(this.connectionState); if (message != null) { this.write(230, message, true); } this.write(230, "User logged in, proceed"); this.stateMachine.setStatus(FtpConnection.LOGIN); } else { final String message = this.ftpServer.getFtpCommandHandler().onLoginFailedMessage(this.connectionState); if (message != null) { this.write(530, message, true); } this.stateMachine.setStatus(FtpConnection.LOGOUT); this.stateMachine.setStatus(FtpConnection.IDLEEND); this.stateMachine.reset(); throw new 
FtpNotLoginException(); } } else { throw new RuntimeException("THIS MUST NOT HAPPEN!"); } } } /** * @throws FtpException * @throws IOException * */ private void onPASV() throws FtpException { boolean okay = false; this.closeDataConnection(); try { this.serverSocket = new ServerSocket(); SocketAddress socketAddress = null; if (this.ftpServer.isLocalhostOnly()) { /* bind socket to localhost */ socketAddress = new InetSocketAddress(this.ftpServer.getLocalHost(), 0); } this.serverSocket.bind(socketAddress); okay = true; final int port = this.serverSocket.getLocalPort(); final int p1 = port / 256; final int p2 = port - p1 * 256; if (this.ftpServer.isLocalhostOnly()) { /* localhost only */ this.write(227, "Entering Passive Mode. (127,0,0,1," + p1 + "," + p2 + ")."); } else { if (this.controlSocket.getLocalAddress().isLoopbackAddress()) { this.write(227, "Entering Passive Mode. (127,0,0,1," + p1 + "," + p2 + ")."); } else { String ip = this.controlSocket.getLocalAddress().getHostAddress(); ip = ip.replaceAll("\\.", ","); this.write(227, "Entering Passive Mode. (" + ip + "," + p1 + "," + p2 + ")."); } } return; } catch (final IOException e) { throw new FtpException(421, "could not open port"); } finally { if (!okay) { this.closeDataConnection(); } } } private void onPORT(final String params[]) throws IOException, FtpCommandSyntaxException { try { /* close old maybe existing data connection */ this.dataSocket.close(); } catch (final Throwable e) { } finally { this.dataSocket = null; } final String parts[] = params[1].split(","); if (parts.length != 6) { throw new FtpCommandSyntaxException(); } this.passiveIP = parts[0] + "." + parts[1] + "." + parts[2] + "." 
+ parts[3]; this.passivePort = Integer.parseInt(parts[4]) * 256 + Integer.parseInt(parts[5]); this.write(200, "PORT command successful"); } private void onPWD() throws IOException, FtpException { this.write(257, "\"" + this.connectionState.getCurrentDir() + "\" is cwd."); } private void onQUIT() throws IOException, FtpException { this.stateMachine.setStatus(FtpConnection.LOGOUT); this.write(221, this.ftpServer.getFtpCommandHandler().onLogoutRequest(this.connectionState)); this.stateMachine.setStatus(FtpConnection.IDLEEND); } /** * @param commandParts * @throws FtpException * @throws IOException */ private void onREST(final String[] commandParts) throws FtpException, IOException { try { final long position = Long.parseLong(commandParts[1]); this.ftpServer.getFtpCommandHandler().onREST(this.connectionState, position); this.write(350, "Restarting at " + position + ". Send STORE or RETRIEVE"); } catch (final NumberFormatException e) { this.write(554, "Requested action not taken: invalid REST parameter."); } } private void onRETR(final String[] commandParts) throws IOException, FtpException { try { try { this.openDataConnection(); } catch (final IOException e) { throw new FtpException(425, "Can't open data connection"); } /* * we need to make sure that the file exists before opening data * connection, see http://cr.yp.to/ftp/retr.html, RFC 959 * * this will cause the 550 file not found before opening the data * connection */ this.ftpServer.getFtpCommandHandler().getSize(this.connectionState, this.buildParameter(commandParts)); this.write(150, "Opening XY mode data connection for transfer"); long bytesWritten = 0; try { bytesWritten = this.ftpServer.getFtpCommandHandler().onRETR(this.dataSocket.getOutputStream(), this.connectionState, this.buildParameter(commandParts)); this.dataSocket.getOutputStream().flush(); this.dataSocket.shutdownOutput(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file 
action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final IOException e) { throw new FtpException(426, e.getMessage()); } catch (final Exception e) { throw new FtpException(451, e.getMessage()); } /* we close the passive port after command */ this.write(226, "Transfer complete. " + bytesWritten + " bytes transfered!"); } finally { this.closeDataConnection(); } } /** * @param commandParts * @throws IOException * @throws FtpException * @throws FtpFileNotExistException */ private void onRMD(final String[] commandParts) throws IOException, FtpException { this.ftpServer.getFtpCommandHandler().removeDirectory(this.connectionState, this.buildParameter(commandParts)); this.write(250, "\"" + this.buildParameter(commandParts) + "\" removed."); } /** * @param commandParts * @throws FtpBadSequenceException * @throws FtpFileNotExistException * @throws IOException */ private void onRNFR(final String[] commandParts) throws FtpException, IOException { if (this.connectionState.getRenameFile() != null) { this.connectionState.setRenameFile(null); throw new FtpBadSequenceException(); } try { this.ftpServer.getFtpCommandHandler().renameFile(this.connectionState, this.buildParameter(commandParts)); } catch (final FtpException e) { this.connectionState.setRenameFile(null); throw e; } this.write(350, "\"" + this.buildParameter(commandParts) + "\" rename pending."); } /** * @param commandParts * @throws FtpBadSequenceException * @throws FtpFileNotExistException * @throws IOException */ private void onRNTO(final String[] commandParts) throws IOException, FtpException { if (this.connectionState.getRenameFile() == null) { /* a renameFile must exist, RNFR must be the command before RNTO */ this.connectionState.setRenameFile(null); throw new FtpBadSequenceException(); } try { this.ftpServer.getFtpCommandHandler().renameFile(this.connectionState, this.buildParameter(commandParts)); } finally { this.connectionState.setRenameFile(null); } this.write(250, "\"" + 
this.buildParameter(commandParts) + "\" rename successful."); } /** * @param commandParts * @throws IOException * @throws FtpFileNotExistException */ private void onSIZE(final String[] commandParts) throws FtpException, IOException { this.write(213, "" + this.ftpServer.getFtpCommandHandler().getSize(this.connectionState, this.buildParameter(commandParts))); } private void onSTOR(final String[] commandParts, final boolean append) throws IOException, FtpException { try { try { this.openDataConnection(); } catch (final IOException e) { throw new FtpException(425, "Can't open data connection"); } this.write(150, "Opening XY mode data connection for transfer"); long bytesRead = 0; try { bytesRead = this.ftpServer.getFtpCommandHandler().onSTOR(this.dataSocket.getInputStream(), this.connectionState, append, this.buildParameter(commandParts)); this.dataSocket.shutdownInput(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final IOException e) { throw new FtpException(426, e.getMessage()); } catch (final Exception e) { throw new FtpException(451, e.getMessage()); } /* we close the passive port after command */ this.write(226, "Transfer complete. 
" + bytesRead + " bytes received!"); } finally { this.closeDataConnection(); } } private void onSTRU(final String[] commandParts) throws IOException, FtpCommandParameterException { if ("F".equalsIgnoreCase(commandParts[1])) { this.write(200, "Command okay."); } else { throw new FtpCommandParameterException(); } } private void onSYST() throws IOException { this.write(215, "UNIX Type: L8"); } private void onTYPE(final String[] commandParts) throws IOException, FtpCommandParameterException { final String type = commandParts[1]; if ("A".equalsIgnoreCase(type)) { this.type = TYPE.ASCII; } else if ("I".equalsIgnoreCase(type)) { this.type = TYPE.BINARY; } else if ("L".equalsIgnoreCase(type)) { if (commandParts.length == 3 && "8".equals(commandParts[2])) { this.type = TYPE.BINARY; } else { throw new FtpCommandParameterException(); } } else { throw new FtpCommandParameterException(); } this.write(200, "Command okay"); } private void onUSER(final String params[]) throws IOException, FtpException { if (this.stateMachine.isFinal()) { this.stateMachine.reset(); } this.stateMachine.setStatus(FtpConnection.USER); this.connectionState.setUser(this.ftpServer.getFtpCommandHandler().getUser(params[1])); if (this.connectionState.getUser() != null) { if (this.connectionState.getUser().getPassword() == null) { final String message = this.ftpServer.getFtpCommandHandler().onLoginSuccessRequest(this.connectionState); if (message != null) { this.write(230, message, true); } this.write(230, "User logged in, proceed"); this.stateMachine.setStatus(FtpConnection.LOGIN); } else { this.write(331, "User name okay, need password"); } } else { final String message = this.ftpServer.getFtpCommandHandler().onLoginFailedMessage(this.connectionState); if (message != null) { this.write(530, message, true); } this.stateMachine.setStatus(FtpConnection.LOGOUT); this.stateMachine.setStatus(FtpConnection.IDLEEND); this.stateMachine.reset(); throw new FtpNotLoginException(); } } private void 
openDataConnection() throws IOException { if (this.dataSocket == null || !this.dataSocket.isConnected()) { if (this.serverSocket != null && this.serverSocket.isBound()) { /* PASV */ this.dataSocket = this.serverSocket.accept(); } else { /* PORT */ this.dataSocket = new Socket(this.passiveIP, this.passivePort); } } } public void run() { try { this.writeMultiLineAuto(220, this.ftpServer.getFtpCommandHandler().getWelcomeMessage(this.connectionState)); while (true) { final String command = this.reader.readLine(); if (command == null) { break; } if (this.ftpServer.isDebug()) { Log.L.info("REQ: " + command); } this.handleCommand(command); } } catch (final IOException e) { try { this.onQUIT(); } catch (final IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (final FtpException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } finally { this.closeDataConnection(); try { this.controlSocket.close(); } catch (final Throwable e2) { } } } private void write(final int code, final String message) throws IOException { if (this.ftpServer.isDebug()) { Log.L.info("RESP: " + code + " " + message); } this.write(code, message, false); } private void write(final int code, final String message, final boolean multiLine) throws IOException { if (multiLine) { this.writer.write(code + "-" + message + "\r\n"); } else { this.writer.write(code + " " + message + "\r\n"); } this.writer.flush(); } private void writeMultiLineAuto(final int code, final String message) throws IOException { final String lines[] = Regex.getLines(message); if (lines != null) { for (int line = 0; line < lines.length; line++) { if (line == lines.length - 1) { this.writer.write(code + " " + lines[line] + "\r\n"); } else { this.writer.write(code + "-" + lines[line] + "\r\n"); } } } this.writer.flush(); } }
// src/org/appwork/utils/net/ftpserver/FtpConnection.java
/** * Copyright (c) 2009 - 2011 AppWork UG(haftungsbeschränkt) <[email protected]> * * This file is part of org.appwork.utils.net.ftpserver * * This software is licensed under the Artistic License 2.0, * see the LICENSE file or http://www.opensource.org/licenses/artistic-license-2.0.php * for details */ package org.appwork.utils.net.ftpserver; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.util.ArrayList; import org.appwork.controlling.State; import org.appwork.controlling.StateConflictException; import org.appwork.controlling.StateMachine; import org.appwork.controlling.StateMachineInterface; import org.appwork.utils.Regex; import org.appwork.utils.logging.Log; /** * @author daniel * */ public class FtpConnection implements Runnable, StateMachineInterface { public static enum COMMAND { /* commands starting with X are experimental, see RFC1123 */ ABOR(true, 0), REST(true, 1), RNTO(true, 1, -1), RNFR(true, 1, -1), DELE(true, 1, -1), XRMD(true, 1, -1), RMD(true, 1, -1), SIZE(true, 1, -1), /* rfc3659 */ STRU(true, 1), MODE(true, 1), ALLO(true, 1, -1), APPE(true, 1, -1), STOR(true, 1, -1), XMKD(true, 1, -1), MKD(true, 1, -1), NLST(true, 1, -1), EPRT(true, 1, 1), /* RFC 2428 */ EPSV(true, 0), /* RFC 2428 */ RETR(true, 1, -1), TYPE(true, 1, 2), LIST(true, 0, 1), XCUP(true, 0), CDUP(true, 0), XCWD(true, 1, -1), CWD(true, 1, -1), XPWD(true, 0), PWD(true, 0), NOOP(false, 0), PASV(true, 0), PASS(false, 1), QUIT(true, 0), SYST(true, 0), PORT(true, 1), USER(false, 1); private int paramSize; private int maxSize; private boolean needLogin; private COMMAND(final boolean needLogin, final int paramSize) { this(needLogin, paramSize, paramSize); } private COMMAND(final boolean needLogin, final int paramSize, final int maxSize) { this.paramSize 
= paramSize; this.needLogin = needLogin; this.maxSize = maxSize; } public boolean match(final int length) { if (length == this.paramSize) { return true; } if (length == this.maxSize) { return true; } if (this.maxSize == -1) { return true; } return false; } public boolean needsLogin() { return this.needLogin; } } private static enum TYPE { ASCII, BINARY; } private static final State IDLE = new State("IDLE"); private static final State USER = new State("USER"); private static final State PASS = new State("USER"); private static final State LOGIN = new State("USER"); private static final State LOGOUT = new State("LOGOUT"); private static final State IDLEEND = new State("IDLEEND"); static { FtpConnection.IDLE.addChildren(FtpConnection.USER); FtpConnection.USER.addChildren(FtpConnection.PASS, FtpConnection.LOGIN, FtpConnection.LOGOUT); FtpConnection.PASS.addChildren(FtpConnection.LOGIN, FtpConnection.LOGOUT); FtpConnection.LOGIN.addChildren(FtpConnection.LOGOUT); FtpConnection.LOGOUT.addChildren(FtpConnection.IDLEEND); } private final FtpServer ftpServer; private final Socket controlSocket; private BufferedReader reader; private BufferedWriter writer; private StateMachine stateMachine = null; private Thread thread = null; private String passiveIP = null; private int passivePort = 0; private TYPE type = TYPE.BINARY; private final FtpConnectionState connectionState; private Socket dataSocket = null; private ServerSocket serverSocket = null; /** * @param ftpServer * @param clientSocket * @throws IOException */ public FtpConnection(final FtpServer ftpServer, final Socket clientSocket) throws IOException { this.stateMachine = new StateMachine(this, FtpConnection.IDLE, FtpConnection.IDLEEND); this.connectionState = ftpServer.getFtpCommandHandler().createNewConnectionState(); this.ftpServer = ftpServer; this.controlSocket = clientSocket; try { this.reader = new BufferedReader(new InputStreamReader(this.controlSocket.getInputStream())); this.writer = new BufferedWriter(new 
OutputStreamWriter(this.controlSocket.getOutputStream())); this.thread = new Thread(ftpServer.getThreadGroup(), this) { @Override public void interrupt() { /* also close all connections on interrupt */ FtpConnection.this.closeDataConnection(); try { FtpConnection.this.controlSocket.close(); } catch (final Throwable e) { } super.interrupt(); } }; this.thread.setName("FTPConnectionThread: " + this); this.thread.start(); } catch (final IOException e) { try { this.controlSocket.close(); } catch (final Throwable e2) { } this.closeDataConnection(); throw e; } } private String buildParameter(final String[] commandParts) { if (commandParts == null) { return null; } String param = ""; for (int index = 1; index < commandParts.length; index++) { if (param.length() > 0) { param += " "; } param += commandParts[index]; } return param; } private void closeDataConnection() { try { this.dataSocket.close(); } catch (final Throwable e) { } finally { this.dataSocket = null; } try { this.serverSocket.close(); } catch (final Throwable e) { } finally { this.serverSocket = null; } } public StateMachine getStateMachine() { return this.stateMachine; } /** * @param command * @throws IOException */ private void handleCommand(final String command) throws IOException { try { final String commandParts[] = command.split(" "); COMMAND commandEnum = null; try { commandEnum = COMMAND.valueOf(commandParts[0]); } catch (final IllegalArgumentException e) { commandEnum = null; } try { if (commandEnum != null) { if (commandEnum.needLogin) { /* checks if this command needs valid login */ if (!this.stateMachine.isState(FtpConnection.LOGIN)) { throw new FtpNotLoginException(); } } if (!commandEnum.match(commandParts.length - 1)) { /* checks if the parameter syntax is okay */ throw new FtpCommandSyntaxException(); } /* this checks RNFR,RNTO command sequence */ if (this.connectionState.getRenameFile() != null && !commandEnum.equals(COMMAND.RNTO)) { /* when renameFile is set, a RNTO command MUST follow */ 
this.connectionState.setRenameFile(null); throw new FtpBadSequenceException(); } switch (commandEnum) { case ABOR: this.onABOR(); break; case REST: this.onREST(commandParts); break; case PASV: this.onPASV(); break; case RNTO: this.onRNTO(commandParts); break; case RNFR: this.onRNFR(commandParts); break; case XRMD: case RMD: this.onRMD(commandParts); break; case DELE: this.onDELE(commandParts); break; case SIZE: this.onSIZE(commandParts); break; case STRU: this.onSTRU(commandParts); break; case MODE: this.onMODE(commandParts); break; case ALLO: this.onALLO(); break; case APPE: this.onSTOR(commandParts, true); break; case STOR: this.onSTOR(commandParts, false); break; case XMKD: case MKD: this.onMKD(commandParts); break; case NLST: this.onNLST(commandParts); break; case EPSV: this.onEPSV(commandParts); break; case EPRT: this.onEPRT(commandParts); break; case RETR: this.onRETR(commandParts); break; case LIST: this.onLIST(commandParts); break; case USER: this.onUSER(commandParts); break; case PORT: this.onPORT(commandParts); break; case SYST: this.onSYST(); break; case QUIT: this.onQUIT(); break; case PASS: this.onPASS(commandParts); break; case NOOP: this.onNOOP(); break; case XPWD: case PWD: this.onPWD(); break; case XCWD: case CWD: this.onCWD(commandParts); break; case XCUP: case CDUP: this.onCDUP(); break; case TYPE: this.onTYPE(commandParts); break; } } else { throw new FtpCommandNotImplementedException(); } } catch (final StateConflictException e) { throw new FtpBadSequenceException(); } } catch (final FtpException e) { this.write(e.getCode(), e.getMessage()); } catch (final Throwable e) { this.write(550, e.getMessage()); } } /** * @throws IOException * */ private void onABOR() throws IOException { this.write(226, "Command okay"); } private void onALLO() throws IOException { this.write(200, "Command okay"); } private void onCDUP() throws IOException, FtpException { this.ftpServer.getFtpCommandHandler().onDirectoryUp(this.connectionState); this.write(200, "Command 
okay."); } private void onCWD(final String params[]) throws IOException, FtpException { this.ftpServer.getFtpCommandHandler().setCurrentDirectory(this.connectionState, this.buildParameter(params)); // this.write(250, "\"" + this.connectionState.getCurrentDir() + // "\" is cwd."); this.write(250, "Directory successfully changed."); } /** * @param commandParts * @throws FtpException * @throws FtpFileNotExistException * @throws IOException */ private void onDELE(final String[] commandParts) throws FtpFileNotExistException, FtpException, IOException { this.ftpServer.getFtpCommandHandler().removeFile(this.connectionState, this.buildParameter(commandParts)); this.write(250, "\"" + this.buildParameter(commandParts) + "\" removed."); } /** * @param commandParts * @throws IOException */ /** * RFC2428 * * @throws FtpException * * @throws FtpNotLoginException **/ private void onEPRT(final String[] commandParts) throws IOException, FtpException { final String parts[] = commandParts[1].split("\\|"); this.closeDataConnection(); if (parts.length != 4) { throw new FtpCommandSyntaxException(); } if (!"1".equals(parts[1])) { /* 2 equals IPV6 */ throw new FtpException(522, "Network protocol not supported, use (1)"); } this.passiveIP = parts[2]; this.passivePort = Integer.parseInt(parts[3]); this.write(200, "PORT command successful"); } /** * @param commandParts * @throws FtpException */ private void onEPSV(final String[] commandParts) throws FtpException { boolean okay = false; this.closeDataConnection(); try { this.serverSocket = new ServerSocket(); SocketAddress socketAddress = null; if (this.ftpServer.isLocalhostOnly()) { /* bind socket to localhost */ socketAddress = new InetSocketAddress(this.ftpServer.getLocalHost(), 0); } this.serverSocket.bind(socketAddress); okay = true; final int port = this.serverSocket.getLocalPort(); this.write(229, "Entering Extended Passive Mode (|||" + port + "|)"); return; } catch (final IOException e) { throw new FtpException(421, "could not open 
port"); } finally { if (!okay) { this.closeDataConnection(); } } } private void onLIST(final String params[]) throws IOException, FtpException { try { try { this.openDataConnection(); } catch (final IOException e) { throw new FtpException(425, "Can't open data connection"); } this.write(150, "Opening XY mode data connection for file list"); try { final ArrayList<? extends FtpFile> list = this.ftpServer.getFtpCommandHandler().getFileList(this.connectionState, this.buildParameter(params)); this.dataSocket.getOutputStream().write(this.ftpServer.getFtpCommandHandler().formatFileList(list).getBytes("UTF-8")); this.dataSocket.getOutputStream().flush(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final Exception e) { throw new FtpException(451, "Requested action aborted: local error in processing"); } /* we close the passive port after command */ this.write(226, "Transfer complete."); } finally { this.closeDataConnection(); } } /** * @param commandParts * @throws IOException * @throws FtpFileNotExistException */ private void onMKD(final String[] commandParts) throws IOException, FtpException { this.ftpServer.getFtpCommandHandler().makeDirectory(this.connectionState, this.buildParameter(commandParts)); this.write(257, "\"" + this.buildParameter(commandParts) + "\" created."); } private void onMODE(final String[] commandParts) throws IOException, FtpCommandParameterException { if ("S".equalsIgnoreCase(commandParts[1])) { this.write(200, "Command okay."); } else { throw new FtpCommandParameterException(); } } /** * @param commandParts * @throws IOException * @throws FtpException */ private void onNLST(final String[] commandParts) throws IOException, FtpException { try { try { this.openDataConnection(); } catch (final IOException e) { throw new FtpException(425, "Can't open data connection"); } this.write(150, 
"Opening XY mode data connection for file list"); try { final ArrayList<? extends FtpFile> list = this.ftpServer.getFtpCommandHandler().getFileList(this.connectionState, this.buildParameter(commandParts)); final StringBuilder sb = new StringBuilder(); for (final FtpFile file : list) { sb.append(file.getName()); sb.append("\r\n"); } this.dataSocket.getOutputStream().write(sb.toString().getBytes("UTF-8")); this.dataSocket.getOutputStream().flush(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final Exception e) { throw new FtpException(451, "Requested action aborted: local error in processing"); } /* we close the passive port after command */ this.write(226, "Transfer complete."); } finally { this.closeDataConnection(); } } private void onNOOP() throws IOException { this.write(200, "Command okay"); } private void onPASS(final String params[]) throws IOException, FtpException { this.stateMachine.setStatus(FtpConnection.PASS); if (this.connectionState.getUser() == null) { throw new FtpBadSequenceException(); } else { if (this.connectionState.getUser().getPassword() != null) { if (this.connectionState.getUser().getPassword().equals(params[1])) { final String message = this.ftpServer.getFtpCommandHandler().onLoginSuccessRequest(this.connectionState); if (message != null) { this.write(230, message, true); } this.write(230, "User logged in, proceed"); this.stateMachine.setStatus(FtpConnection.LOGIN); } else { final String message = this.ftpServer.getFtpCommandHandler().onLoginFailedMessage(this.connectionState); if (message != null) { this.write(530, message, true); } this.stateMachine.setStatus(FtpConnection.LOGOUT); this.stateMachine.setStatus(FtpConnection.IDLEEND); this.stateMachine.reset(); throw new FtpNotLoginException(); } } else { throw new RuntimeException("THIS MUST NOT HAPPEN!"); } } } /** * 
@throws FtpException * @throws IOException * */ private void onPASV() throws FtpException { boolean okay = false; this.closeDataConnection(); try { this.serverSocket = new ServerSocket(); SocketAddress socketAddress = null; if (this.ftpServer.isLocalhostOnly()) { /* bind socket to localhost */ socketAddress = new InetSocketAddress(this.ftpServer.getLocalHost(), 0); } this.serverSocket.bind(socketAddress); okay = true; final int port = this.serverSocket.getLocalPort(); final int p1 = port / 256; final int p2 = port - p1 * 256; if (this.ftpServer.isLocalhostOnly()) { /* localhost only */ this.write(227, "Entering Passive Mode. (127,0,0,1," + p1 + "," + p2 + ")."); } else { if (this.controlSocket.getLocalAddress().isLoopbackAddress()) { this.write(227, "Entering Passive Mode. (127,0,0,1," + p1 + "," + p2 + ")."); } else { String ip = this.controlSocket.getLocalAddress().getHostAddress(); ip = ip.replaceAll("\\.", ","); this.write(227, "Entering Passive Mode. (" + ip + "," + p1 + "," + p2 + ")."); } } return; } catch (final IOException e) { throw new FtpException(421, "could not open port"); } finally { if (!okay) { this.closeDataConnection(); } } } private void onPORT(final String params[]) throws IOException, FtpCommandSyntaxException { try { /* close old maybe existing data connection */ this.dataSocket.close(); } catch (final Throwable e) { } finally { this.dataSocket = null; } final String parts[] = params[1].split(","); if (parts.length != 6) { throw new FtpCommandSyntaxException(); } this.passiveIP = parts[0] + "." + parts[1] + "." + parts[2] + "." 
+ parts[3]; this.passivePort = Integer.parseInt(parts[4]) * 256 + Integer.parseInt(parts[5]); this.write(200, "PORT command successful"); } private void onPWD() throws IOException, FtpException { this.write(257, "\"" + this.connectionState.getCurrentDir() + "\" is cwd."); } private void onQUIT() throws IOException, FtpException { this.stateMachine.setStatus(FtpConnection.LOGOUT); this.write(221, this.ftpServer.getFtpCommandHandler().onLogoutRequest(this.connectionState)); this.stateMachine.setStatus(FtpConnection.IDLEEND); } /** * @param commandParts * @throws FtpException * @throws IOException */ private void onREST(final String[] commandParts) throws FtpException, IOException { try { final long position = Long.parseLong(commandParts[1]); this.ftpServer.getFtpCommandHandler().onREST(this.connectionState, position); this.write(350, "Restarting at " + position + ". Send STORE or RETRIEVE"); } catch (final NumberFormatException e) { this.write(554, "Requested action not taken: invalid REST parameter."); } } private void onRETR(final String[] commandParts) throws IOException, FtpException { try { try { this.openDataConnection(); } catch (final IOException e) { throw new FtpException(425, "Can't open data connection"); } /* * we need to make sure that the file exists before opening data * connection, see http://cr.yp.to/ftp/retr.html, RFC 959 * * this will cause the 550 file not found before opening the data * connection */ this.ftpServer.getFtpCommandHandler().getSize(this.connectionState, this.buildParameter(commandParts)); this.write(150, "Opening XY mode data connection for transfer"); long bytesWritten = 0; try { bytesWritten = this.ftpServer.getFtpCommandHandler().onRETR(this.dataSocket.getOutputStream(), this.connectionState, this.buildParameter(commandParts)); this.dataSocket.getOutputStream().flush(); this.dataSocket.shutdownOutput(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file 
action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final IOException e) { throw new FtpException(426, e.getMessage()); } catch (final Exception e) { throw new FtpException(451, e.getMessage()); } /* we close the passive port after command */ this.write(226, "Transfer complete. " + bytesWritten + " bytes transfered!"); } finally { this.closeDataConnection(); } } /** * @param commandParts * @throws IOException * @throws FtpException * @throws FtpFileNotExistException */ private void onRMD(final String[] commandParts) throws IOException, FtpException { this.ftpServer.getFtpCommandHandler().removeDirectory(this.connectionState, this.buildParameter(commandParts)); this.write(250, "\"" + this.buildParameter(commandParts) + "\" removed."); } /** * @param commandParts * @throws FtpBadSequenceException * @throws FtpFileNotExistException * @throws IOException */ private void onRNFR(final String[] commandParts) throws FtpException, IOException { if (this.connectionState.getRenameFile() != null) { this.connectionState.setRenameFile(null); throw new FtpBadSequenceException(); } try { this.ftpServer.getFtpCommandHandler().renameFile(this.connectionState, this.buildParameter(commandParts)); } catch (final FtpException e) { this.connectionState.setRenameFile(null); throw e; } this.write(350, "\"" + this.buildParameter(commandParts) + "\" rename pending."); } /** * @param commandParts * @throws FtpBadSequenceException * @throws FtpFileNotExistException * @throws IOException */ private void onRNTO(final String[] commandParts) throws IOException, FtpException { if (this.connectionState.getRenameFile() == null) { /* a renameFile must exist, RNFR must be the command before RNTO */ this.connectionState.setRenameFile(null); throw new FtpBadSequenceException(); } try { this.ftpServer.getFtpCommandHandler().renameFile(this.connectionState, this.buildParameter(commandParts)); } finally { this.connectionState.setRenameFile(null); } this.write(250, "\"" + 
this.buildParameter(commandParts) + "\" rename successful."); } /** * @param commandParts * @throws IOException * @throws FtpFileNotExistException */ private void onSIZE(final String[] commandParts) throws FtpException, IOException { this.write(213, "" + this.ftpServer.getFtpCommandHandler().getSize(this.connectionState, this.buildParameter(commandParts))); } private void onSTOR(final String[] commandParts, final boolean append) throws IOException, FtpException { try { try { this.openDataConnection(); } catch (final IOException e) { throw new FtpException(425, "Can't open data connection"); } this.write(150, "Opening XY mode data connection for transfer"); long bytesRead = 0; try { bytesRead = this.ftpServer.getFtpCommandHandler().onSTOR(this.dataSocket.getInputStream(), this.connectionState, append, this.buildParameter(commandParts)); this.dataSocket.shutdownInput(); } catch (final FtpFileNotExistException e) { /* need another error code here */ throw new FtpException(450, "Requested file action not taken; File unavailable"); } catch (final FtpException e) { throw e; } catch (final IOException e) { throw new FtpException(426, e.getMessage()); } catch (final Exception e) { throw new FtpException(451, e.getMessage()); } /* we close the passive port after command */ this.write(226, "Transfer complete. 
" + bytesRead + " bytes received!"); } finally { this.closeDataConnection(); } } private void onSTRU(final String[] commandParts) throws IOException, FtpCommandParameterException { if ("F".equalsIgnoreCase(commandParts[1])) { this.write(200, "Command okay."); } else { throw new FtpCommandParameterException(); } } private void onSYST() throws IOException { this.write(215, "UNIX Type: L8"); } private void onTYPE(final String[] commandParts) throws IOException, FtpCommandParameterException { final String type = commandParts[1]; if ("A".equalsIgnoreCase(type)) { this.type = TYPE.ASCII; } else if ("I".equalsIgnoreCase(type)) { this.type = TYPE.BINARY; } else if ("L".equalsIgnoreCase(type)) { if (commandParts.length == 3 && "8".equals(commandParts[2])) { this.type = TYPE.BINARY; } else { throw new FtpCommandParameterException(); } } else { throw new FtpCommandParameterException(); } this.write(200, "Command okay"); } private void onUSER(final String params[]) throws IOException, FtpException { if (this.stateMachine.isFinal()) { this.stateMachine.reset(); } this.stateMachine.setStatus(FtpConnection.USER); this.connectionState.setUser(this.ftpServer.getFtpCommandHandler().getUser(params[1])); if (this.connectionState.getUser() != null) { if (this.connectionState.getUser().getPassword() == null) { final String message = this.ftpServer.getFtpCommandHandler().onLoginSuccessRequest(this.connectionState); if (message != null) { this.write(230, message, true); } this.write(230, "User logged in, proceed"); this.stateMachine.setStatus(FtpConnection.LOGIN); } else { this.write(331, "User name okay, need password"); } } else { final String message = this.ftpServer.getFtpCommandHandler().onLoginFailedMessage(this.connectionState); if (message != null) { this.write(530, message, true); } this.stateMachine.setStatus(FtpConnection.LOGOUT); this.stateMachine.setStatus(FtpConnection.IDLEEND); this.stateMachine.reset(); throw new FtpNotLoginException(); } } private void 
openDataConnection() throws IOException { if (this.dataSocket == null || !this.dataSocket.isConnected()) { if (this.serverSocket != null && this.serverSocket.isBound()) { /* PASV */ this.dataSocket = this.serverSocket.accept(); } else { /* PORT */ this.dataSocket = new Socket(this.passiveIP, this.passivePort); } } } public void run() { try { this.writeMultiLineAuto(220, this.ftpServer.getFtpCommandHandler().getWelcomeMessage(this.connectionState)); while (true) { final String command = this.reader.readLine(); if (command == null) { break; } if (this.ftpServer.isDebug()) { Log.L.info("REQ: " + command); } this.handleCommand(command); } } catch (final IOException e) { } finally { this.closeDataConnection(); try { this.controlSocket.close(); } catch (final Throwable e2) { } } } private void write(final int code, final String message) throws IOException { if (this.ftpServer.isDebug()) { Log.L.info("RESP: " + code + " " + message); } this.write(code, message, false); } private void write(final int code, final String message, final boolean multiLine) throws IOException { if (multiLine) { this.writer.write(code + "-" + message + "\r\n"); } else { this.writer.write(code + " " + message + "\r\n"); } this.writer.flush(); } private void writeMultiLineAuto(final int code, final String message) throws IOException { final String lines[] = Regex.getLines(message); if (lines != null) { for (int line = 0; line < lines.length; line++) { if (line == lines.length - 1) { this.writer.write(code + " " + lines[line] + "\r\n"); } else { this.writer.write(code + "-" + lines[line] + "\r\n"); } } } this.writer.flush(); } }
FtpConnection: added readtimeout
src/org/appwork/utils/net/ftpserver/FtpConnection.java
FtpConnection: added readtimeout
Java
bsd-2-clause
c5d3e6290a7f5891a15d6cf206ac3a45e36377b2
0
malensek/sing
package io.sigpipe.sing.stat;

import java.util.List;

import org.apache.commons.math3.util.FastMath;

import io.sigpipe.sing.dataset.feature.Feature;

/**
 * Accumulates squared prediction errors alongside running statistics for the
 * actual and predicted value streams, and derives error metrics (RMSE, NRMSE,
 * CV(RMSE)) from them.
 */
public class SquaredError {

    private final RunningStatistics sqErrs = new RunningStatistics();
    private final RunningStatistics actualStats = new RunningStatistics();
    private final RunningStatistics predictedStats = new RunningStatistics();

    /**
     * Seeds the error statistics from two parallel lists of observations.
     *
     * @param actual    observed values
     * @param predicted model outputs, paired index-by-index with {@code actual}
     * @throws IllegalArgumentException if the lists differ in length
     */
    public SquaredError(List<Feature> actual, List<Feature> predicted) {
        if (actual.size() != predicted.size()) {
            throw new IllegalArgumentException(
                    "List sizes must be equal");
        }

        int numPairs = actual.size();
        for (int i = 0; i < numPairs; ++i) {
            Feature actualFeature = actual.get(i);
            Feature predictedFeature = predicted.get(i);

            /* The error is computed via Feature subtraction here (not plain
             * double subtraction), then squared. */
            Feature error = actualFeature.subtract(predictedFeature);
            sqErrs.put(FastMath.pow(error.getDouble(), 2.0));
            actualStats.put(actualFeature.getDouble());
            predictedStats.put(predictedFeature.getDouble());
        }
    }

    /** Records one actual/predicted pair, unwrapping the Features to doubles. */
    public void put(Feature actual, Feature predicted) {
        put(actual.getDouble(), predicted.getDouble());
    }

    /** Records one actual/predicted pair and its squared error. */
    public void put(double actual, double predicted) {
        double error = actual - predicted;
        sqErrs.put(FastMath.pow(error, 2.0));
        actualStats.put(actual);
        predictedStats.put(predicted);
    }

    /** Root mean squared error over all observations seen so far. */
    public double RMSE() {
        return FastMath.sqrt(sqErrs.mean());
    }

    /** RMSE normalized by the observed range (max - min) of actual values. */
    public double NRMSE() {
        return RMSE() / (actualStats.max() - actualStats.min());
    }

    /** Coefficient of variation of the RMSE: RMSE over the actual mean. */
    public double CVRMSE() {
        return RMSE() / actualStats.mean();
    }

    /** Snapshot of the statistics for the actual values. */
    public SummaryStatistics actualSummary() {
        return new SummaryStatistics(actualStats);
    }

    /** Snapshot of the statistics for the predicted values. */
    public SummaryStatistics predictedSummary() {
        return new SummaryStatistics(predictedStats);
    }
}
src/main/java/io/sigpipe/sing/stat/SquaredError.java
package io.sigpipe.sing.stat;

import java.util.List;

import org.apache.commons.math3.util.FastMath;

import io.sigpipe.sing.dataset.feature.Feature;

/**
 * Accumulates squared prediction errors alongside running statistics for the
 * actual and predicted value streams, and derives error metrics (RMSE, NRMSE,
 * CV(RMSE)) from them.
 */
public class SquaredError {

    private RunningStatistics sqErrs = new RunningStatistics();
    private RunningStatistics actualStats = new RunningStatistics();
    private RunningStatistics predictedStats = new RunningStatistics();

    /**
     * Seeds the error statistics from two parallel lists of observations.
     *
     * @param actual    observed values
     * @param predicted model outputs, paired index-by-index with {@code actual}
     * @throws IllegalArgumentException if the lists differ in length
     */
    public SquaredError(List<Feature> actual, List<Feature> predicted) {
        if (actual.size() != predicted.size()) {
            throw new IllegalArgumentException(
                    "List sizes must be equal");
        }

        /* Delegate each pair to put(Feature, Feature) so the per-observation
         * error computation lives in exactly one place; the loop body was
         * previously a verbatim copy of that method. */
        for (int i = 0; i < actual.size(); ++i) {
            this.put(actual.get(i), predicted.get(i));
        }
    }

    /**
     * Records one actual/predicted pair and its squared error. The error is
     * computed via Feature subtraction before unwrapping to a double.
     */
    public void put(Feature actual, Feature predicted) {
        Feature err = actual.subtract(predicted);
        double p = FastMath.pow(err.getDouble(), 2.0);
        sqErrs.put(p);
        actualStats.put(actual.getDouble());
        predictedStats.put(predicted.getDouble());
    }

    /** Records one actual/predicted pair and its squared error. */
    public void put(double actual, double predicted) {
        double err = actual - predicted;
        double p = FastMath.pow(err, 2.0);
        sqErrs.put(p);
        actualStats.put(actual);
        predictedStats.put(predicted);
    }

    /** Root mean squared error over all observations seen so far. */
    public double RMSE() {
        return FastMath.sqrt(sqErrs.mean());
    }

    /** RMSE normalized by the observed range (max - min) of actual values. */
    public double NRMSE() {
        return RMSE() / (actualStats.max() - actualStats.min());
    }

    /** Coefficient of variation of the RMSE: RMSE over the actual mean. */
    public double CVRMSE() {
        return RMSE() / actualStats.mean();
    }

    /** Snapshot of the statistics for the actual values. */
    public SummaryStatistics actualSummary() {
        return new SummaryStatistics(actualStats);
    }

    /** Snapshot of the statistics for the predicted values. */
    public SummaryStatistics predictedSummary() {
        return new SummaryStatistics(predictedStats);
    }
}
Consolidate put() methods
src/main/java/io/sigpipe/sing/stat/SquaredError.java
Consolidate put() methods
Java
mit
05efd3d1449b1ded01ecdc22e94953a743a08a59
0
jchambers/jvptree,jchambers/jvptree
package com.eatthepath.jvptree;

/**
 * Indicates that a collection of points could not be partitioned; presumably
 * thrown internally while building vp-tree nodes — confirm against callers.
 */
class PartitionException extends Exception {
    // Explicit serialVersionUID: Exception is Serializable, and declaring the
    // UID suppresses the serializable-class compiler warning.
    private static final long serialVersionUID = 1L;
}
src/main/java/com/eatthepath/jvptree/PartitionException.java
package com.eatthepath.jvptree;

/**
 * Indicates that a collection of points could not be partitioned; presumably
 * thrown internally while building vp-tree nodes — confirm against callers.
 */
class PartitionException extends Exception {
    // Exception is Serializable; declare serialVersionUID explicitly to
    // suppress the serializable-class warning and pin the serialized form.
    private static final long serialVersionUID = 1L;
}
Added a `serialVersionUID` to suppress a warning.
src/main/java/com/eatthepath/jvptree/PartitionException.java
Added a `serialVersionUID` to suppress a warning.
Java
mit
e6d13cbc2f6a03bfc88d63cfaa983f1026ab3eb6
0
SettRaziel/java_visualization
package data.factory;

import data.DataType;
import data.entity.DataDomain;
import data.entity.MetaData;
import data.entity.datanumber.DataDate;
import data.entity.datanumber.DataDouble;
import data.entity.datanumber.DataInteger;
import exception.MetaDataException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;

/**
 * Static factory class to create meta data from given input data
 * @author Benjamin Held (05-15-2016)
 * @version 0.1.1
 * @since 07-04-2016
 */
public class MetaDataFactory {

    /** Date pattern for LocalDate domains; DateTimeFormatter is immutable and
     *  thread-safe, so it is cached instead of rebuilt per call. */
    private static final DateTimeFormatter DATE_FORMAT =
            DateTimeFormatter.ofPattern("dd.MM.yyyy");

    /**
     * Creates a {@link MetaData} object from the first four tokenized input
     * lines: data type, x domain, y domain and z domain, in that order.
     *
     * @param input tokenized input lines
     * @return the assembled meta data
     * @throws MetaDataException if fewer than four lines are supplied, a line
     *         is too short, or a domain type is unknown
     */
    public static MetaData parseMetaData(ArrayList<String[]> input) throws MetaDataException {
        // Guard here so malformed input surfaces as the declared
        // MetaDataException rather than an IndexOutOfBoundsException.
        if (input.size() < 4) {
            throw new MetaDataException("Not enough arguments to create valid meta data information.");
        }
        return new MetaData(createDataType(input.get(0)), // data type
                            createDomain(input.get(1)),   // domain in x
                            createDomain(input.get(2)),   // domain in y
                            createDomain(input.get(3)));  // domain in z
    }

    /**
     * Builds a typed {@link DataDomain} from one tokenized line. Indices used:
     * 0 = value type, 1 = label, 2-3 = bounds, 4 = step/resolution (field
     * meanings inferred from usage — confirm with the input format spec).
     *
     * @throws MetaDataException if the type token is not recognized
     */
    private static DataDomain<?> createDomain(String[] line) throws MetaDataException {
        switch (line[0]) {
            case "Double":
                return new DataDomain<>(line[1].trim(),
                        new DataDouble(Double.parseDouble(line[2].trim())),
                        new DataDouble(Double.parseDouble(line[3].trim())),
                        Double.parseDouble(line[4].trim()));
            case "Integer":
                return new DataDomain<>(line[1].trim(),
                        new DataInteger(Integer.parseInt(line[2].trim())),
                        new DataInteger(Integer.parseInt(line[3].trim())),
                        Integer.parseInt(line[4].trim()));
            case "LocalDate":
                return new DataDomain<>(line[1].trim(),
                        new DataDate(LocalDate.parse(line[2].trim(), DATE_FORMAT)),
                        new DataDate(LocalDate.parse(line[3].trim(), DATE_FORMAT)),
                        Integer.parseInt(line[4].trim()));
            default:
                throw new MetaDataException("Error while parsing metadata: no valid type for data domain.");
        }
    }

    /**
     * Creates the {@link DataType} from the first input line (three tokens).
     *
     * @throws MetaDataException if the line has fewer than three tokens
     */
    private static DataType createDataType(String[] line) throws MetaDataException {
        try {
            return new DataType(line[0].trim(), line[1].trim(), line[2].trim());
        } catch (ArrayIndexOutOfBoundsException ex) {
            throw new MetaDataException("Not enough arguments to create valid meta data information.");
        }
    }
}
src/data/factory/MetaDataFactory.java
package data.factory;

import data.DataType;
import data.entity.DataDomain;
import data.entity.MetaData;
import data.entity.datanumber.DataDate;
import data.entity.datanumber.DataDouble;
import data.entity.datanumber.DataInteger;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;

/**
 * Static factory class to create meta data from given input data
 * @author Benjamin Held (05-15-2016)
 * @version 0.1.0
 * @since 05-27-2016
 */
public class MetaDataFactory {

    /** Date pattern for LocalDate domains; DateTimeFormatter is immutable and
     *  thread-safe, so it is cached instead of rebuilt per call. */
    private static final DateTimeFormatter DATE_FORMAT =
            DateTimeFormatter.ofPattern("dd.MM.yyyy");

    /**
     * Creates a {@link MetaData} object from the first four tokenized input
     * lines: data type, x domain, y domain and z domain, in that order.
     *
     * @param input tokenized input lines
     * @return the assembled meta data
     * @throws IllegalArgumentException if fewer than four lines are supplied,
     *         a line is too short, or a domain type is unknown
     */
    public static MetaData parseMetaData(ArrayList<String[]> input) throws IllegalArgumentException {
        // Guard here so malformed input surfaces as the declared
        // IllegalArgumentException rather than an IndexOutOfBoundsException.
        if (input.size() < 4) {
            throw new IllegalArgumentException("Not enough arguments to create valid meta data information.");
        }
        return new MetaData(createDataType(input.get(0)), // data type
                            createDomain(input.get(1)),   // domain in x
                            createDomain(input.get(2)),   // domain in y
                            createDomain(input.get(3)));  // domain in z
    }

    /**
     * Builds a typed {@link DataDomain} from one tokenized line. Indices used:
     * 0 = value type, 1 = label, 2-3 = bounds, 4 = step/resolution (field
     * meanings inferred from usage — confirm with the input format spec).
     *
     * @throws IllegalArgumentException if the type token is not recognized
     */
    private static DataDomain<?> createDomain(String[] line) {
        switch (line[0]) {
            case "Double":
                return new DataDomain<>(line[1].trim(),
                        new DataDouble(Double.parseDouble(line[2].trim())),
                        new DataDouble(Double.parseDouble(line[3].trim())),
                        Double.parseDouble(line[4].trim()));
            case "Integer":
                return new DataDomain<>(line[1].trim(),
                        new DataInteger(Integer.parseInt(line[2].trim())),
                        new DataInteger(Integer.parseInt(line[3].trim())),
                        Integer.parseInt(line[4].trim()));
            case "LocalDate":
                return new DataDomain<>(line[1].trim(),
                        new DataDate(LocalDate.parse(line[2].trim(), DATE_FORMAT)),
                        new DataDate(LocalDate.parse(line[3].trim(), DATE_FORMAT)),
                        Integer.parseInt(line[4].trim()));
            default:
                throw new IllegalArgumentException("Error while parsing metadata: no valid type.");
        }
    }

    /**
     * Creates the {@link DataType} from the first input line (three tokens).
     *
     * @throws IllegalArgumentException if the line has fewer than three tokens
     */
    private static DataType createDataType(String[] line) {
        // Explicit bounds check: previously a short line escaped as an
        // ArrayIndexOutOfBoundsException with no context.
        if (line.length < 3) {
            throw new IllegalArgumentException("Not enough arguments to create valid meta data information.");
        }
        return new DataType(line[0].trim(), line[1].trim(), line[2].trim());
    }
}
Adjusted error handling: replaced exceptions with MetaDataException and added error handling when creating DataTypes
src/data/factory/MetaDataFactory.java
Adjusted error handling: replaced exceptions with MetaDataException and added error handling when creating DataTypes
Java
mit
4cd62867919f904155b788a2e53ae135b33b1493
0
EcoGame/Eco,EcoGame/Eco
package eco.game;

import java.util.Random;

/**
 * Holds the splash line shown on the title screen: {@link #newSplash()} picks
 * a random entry from the pool, {@link #getSplash()} returns the last pick.
 */
public class SplashText {

    // Shared RNG: reuse one instance instead of constructing a new Random on
    // every pick.
    private static final Random random = new Random();

    private static String[] splashes;
    private static String splash = "";

    static {
        splashes = new String[]{
                "95% Stable",
                "Thousands of bugs!",
                "Artifical but not intelligent",
                "java.lang.nullPointerException: splash not found. At: SplashText.java(15)",
                "10,000 lines!",
                "Multicultural!",
                // FIX: a comma was missing after this entry, which made the
                // array initializer (and the whole class) fail to compile.
                "Go inject your bubby somewhere else",
                "Your turn to be a conquistador!",
                "How many of these are there?",
                "Font schmont! Who cares if you can read it?",
                "What is the difference between snowmen and snowwomen? Snowballs.",
                "How do astronomers organize a party? They planet.",
                "I went to the bank the other day and asked the banker to check my balance, so she pushed me!",
                "Did you hear about the kidnapping at school? It's okay. He woke up.",
                "If you ever get cold, just stand in the corner of a room for a while. They're normally around 90 degrees.",
                "I was wondering why the ball kept getting bigger and bigger, and then it hit me.",
                "What do cars eat on their toast? Traffic jam.",
                "Et tu, Brute?",
                "Artificial intelligence usually beats real stupidity.",
                "Gotta catch 'em all!",
                "Why is wood made out of splinters?",
                "Connecting to the NSA...",
                "Persistent pain is something that conflicted with his cosy picture of the world.",
                "Does fuzzy logic tickle?",
                "Be the change you want to see in the world",
                "I fear fear itself",
                "Relax, it's only ones and zeros",
                "You're lucky if I can read my own code",
                "Computers are like air conditioners, they stop working properly if you open Windows.",
                "'Lemme just draw it on the board.'",
                "Never trust an operating system you don't have sources for.",
                "Windows - The best $89 solitaire game you can buy",
                "This is a P. Heikompf production",
                "Did you hear about the Italian chef that died? Yeah, he pasta way."
        };
    }

    /** Selects a new random splash line from the pool. */
    public static void newSplash() {
        splash = splashes[random.nextInt(splashes.length)];
    }

    /** Returns the most recently selected splash ("" before the first pick). */
    public static String getSplash() {
        return splash;
    }
}
src/eco/game/SplashText.java
package eco.game;

import java.util.Random;

/**
 * Holds the splash line shown on the title screen: {@link #newSplash()} picks
 * a random entry from the pool, {@link #getSplash()} returns the last pick.
 */
public class SplashText {

    // Shared RNG: reuse one instance instead of constructing a new Random on
    // every pick.
    private static final Random random = new Random();

    private static String[] splashes;
    private static String splash = "";

    static {
        splashes = new String[]{
                "95% Stable",
                "Thousands of bugs!",
                "Artifical but not intelligent",
                "java.lang.nullPointerException: splash not found. At: SplashText.java(15)",
                "10,000 lines!",
                "Multicultural!",
                // FIX: a comma was missing after this entry, which made the
                // array initializer (and the whole class) fail to compile.
                "Go inject your bubby somewhere else",
                "Your turn to be a conquistador!",
                "How many of these are there?",
                "Font schmont! Who cares if you can read it?",
                "What is the difference between snowmen and snowwomen? Snowballs.",
                "How do astronomers organize a party? They planet.",
                "I went to the bank the other day and asked the banker to check my balance, so she pushed me!",
                "Did you hear about the kidnapping at school? It's okay. He woke up.",
                "If you ever get cold, just stand in the corner of a room for a while. They're normally around 90 degrees.",
                "I was wondering why the ball kept getting bigger and bigger, and then it hit me.",
                "What do cars eat on their toast? Traffic jam.",
                "Et tu, Brute?",
                "Artificial intelligence usually beats real stupidity.",
                "Gotta catch 'em all!",
                "Why is wood made out of splinters?",
                "Connecting to the NSA...",
                "Persistent pain is something that conflicted with his cosy picture of the world.",
                "Does fuzzy logic tickle?",
                "Relax, it's only ones and zeros",
                "You're lucky if I can read my own code",
                "Computers are like air conditioners, they stop working properly if you open Windows.",
                "'Lemme just draw it on the board.'",
                "Never trust an operating system you don't have sources for.",
                "Windows - The best $89 solitaire game you can buy",
                "This is a P. Heikompf production",
                "Did you hear about the Italian chef that died? Yeah, he pasta way."
        };
    }

    /** Selects a new random splash line from the pool. */
    public static void newSplash() {
        splash = splashes[random.nextInt(splashes.length)];
    }

    /** Returns the most recently selected splash ("" before the first pick). */
    public static String getSplash() {
        return splash;
    }
}
Update SplashText.java
src/eco/game/SplashText.java
Update SplashText.java
Java
mit
221a61f7d89e64e8a6a84ebaaf8ebc1b5a72f232
0
xu6148152/binea_project_for_android,xu6148152/binea_project_for_android,xu6148152/binea_project_for_android,xu6148152/binea_project_for_android,xu6148152/binea_project_for_android
package com.example.android.bluetoothchat; import android.bluetooth.BluetoothSocket; import android.util.Log; import com._94fifty.device.BluetoothDeviceBridgeFactory; import com._94fifty.device.DeviceBridge; import com._94fifty.model.request.AbstractRequest; import com._94fifty.model.response.AbstractResponse; import com._94fifty.model.response.EndDribblingActivityResponse; import com._94fifty.model.response.EndRawStreamResponse; import com._94fifty.model.response.EndShootingActivityResponse; import com._94fifty.model.response.StartDribblingActivityResponse; import com._94fifty.model.response.StartRawStreamResponse; import com._94fifty.model.response.StartShootingActivityResponse; import com._94fifty.model.response.notification.AbstractNotification; import com._94fifty.model.response.notification.DribblingActivityRecordNotification; import com._94fifty.model.response.notification.RawDataNotification; import com._94fifty.model.response.notification.ShootingActivityRecordNotification; import com._94fifty.model.type.ActivityLimitBasis; import com._94fifty.model.type.ConnectionState; import com._94fifty.model.type.InvocationType; import com._94fifty.model.type.NotificationTrigger; import com._94fifty.model.type.RequestStatus; /** * Created by xubinggui on 7/20/15. 
*/ public class BasketDataDelegate implements DeviceBridge.Delegate { private final static String TAG = BasketDataDelegate.class.getCanonicalName(); private DeviceBridge mDeviceBridge; private ConnectionState mCurrentConnectionState; private BasketballDataNotificationListener mListener; public BasketDataDelegate(BluetoothSocket socket) { BluetoothDeviceBridgeFactory factory = new BluetoothDeviceBridgeFactory(); mDeviceBridge = factory.create(socket, this); mDeviceBridge.addListener(this); } @Override public void onConnectionStateChanged(ConnectionState connectionState) { Log.d(TAG, "onConnectionStateChanged " ); mCurrentConnectionState = connectionState; //if(connectionState == ConnectionState.Open){ // Log.d(TAG, "onConnectionStateChanged open "); // mDeviceBridge.executeRequest(new StartRawStreamRequest()); //} } @Override public void onNotification(AbstractNotification abstractNotification) { Log.d(TAG, "onNotification" + abstractNotification); //for(int i =0;i<notification.getRawData().length;i++){ // if(notification.getRawData()[i] != 0) // Log.d(TAG,"onNotification " + notification.getRawData()[i]); //} if(abstractNotification.getType() == InvocationType.DribblingActivityRecord) { DribblingActivityRecordNotification notification = (DribblingActivityRecordNotification) abstractNotification; Log.d(TAG, "DribblingActivityRecordNotification onNotification " + notification.getRecord().getTotalDribbles()); mListener.dribblingActivityRecord(notification); }else if(abstractNotification.getType() == InvocationType.ShootingActivityRecord){ ShootingActivityRecordNotification notification = (ShootingActivityRecordNotification) abstractNotification; Log.d(TAG, "ShootingActivityRecordNotification onNotification " + notification.getRecord().getAverageShotSpin()); }else if(abstractNotification.getType() == InvocationType.RawData){ RawDataNotification notification = (RawDataNotification) abstractNotification; String readMessage = 
Byte2Hex.convert2byte(notification.getRawData()); FileUtil.saveData(readMessage); } } @Override public void onResponse(AbstractResponse abstractResponse) { Log.d(TAG, "onResponse " + abstractResponse.toString()); } @Override public void onResponseError(byte[] bytes, String s) { Log.d(TAG, "onResponseError " + s); } public ConnectionState getConnectionState(){ return mCurrentConnectionState; } public RequestStatus sendRequest(AbstractRequest request){ if(mCurrentConnectionState == ConnectionState.Open) { return mDeviceBridge.executeRequest(request); } return RequestStatus.Error; } public void setBasketballDataMotificationListener(BasketballDataNotificationListener listener){ mListener = listener; } public void startDribblingActivity(){ DeviceFacade.startDribblingActivity(mDeviceBridge, ActivityLimitBasis.Time, NotificationTrigger.Time, 600000, 200, 5, 1, new DeviceResponseCallback<StartDribblingActivityResponse>() { @Override protected void onResponse(StartDribblingActivityResponse response) { Log.d(TAG, "startDribblingActivity response " + response.getStatus().isOK()); } }); } public void endDribblingActivity(){ DeviceFacade.endDribblingActivity(mDeviceBridge, new DeviceResponseCallback<EndDribblingActivityResponse>() { @Override protected void onResponse(EndDribblingActivityResponse response) { Log.d(TAG, "endDribblingActivity onResponse " + response.getStatus().isOK()); } }); } public void startShootingActivity(){ DeviceFacade.startShootingActivity(mDeviceBridge, ActivityLimitBasis.Event, NotificationTrigger.Event, 20, 1, 1, new DeviceResponseCallback<StartShootingActivityResponse>() { @Override protected void onResponse(StartShootingActivityResponse response) { Log.d(TAG, "startShootingActivity response " + response.getStatus().isOK()); } }); } public void endShootingActivity(){ DeviceFacade.endShootingActivity(mDeviceBridge, new DeviceResponseCallback<EndShootingActivityResponse>() { @Override protected void onResponse(EndShootingActivityResponse response) { 
Log.d(TAG, "endShootingActivity onResponse " + response.getStatus().isOK()); } }); } public void startRawStream(){ DeviceFacade.startRawStream(mDeviceBridge, new DeviceResponseCallback<StartRawStreamResponse>() { @Override protected void onResponse(StartRawStreamResponse response) { Log.d(TAG, "startRawStream response " + response.getStatus().isOK()); } }); } public void endRawStream(){ DeviceFacade.endRawStream(mDeviceBridge, new DeviceResponseCallback<EndRawStreamResponse>() { @Override protected void onResponse(EndRawStreamResponse response) { Log.d(TAG, "endRawStream onResponse " + response.getStatus().isOK()); } }); } }
BluetoothChat/Application/src/main/java/com/example/android/bluetoothchat/BasketDataDelegate.java
package com.example.android.bluetoothchat; import android.bluetooth.BluetoothSocket; import android.util.Log; import com._94fifty.device.BluetoothDeviceBridgeFactory; import com._94fifty.device.DeviceBridge; import com._94fifty.model.request.AbstractRequest; import com._94fifty.model.response.AbstractResponse; import com._94fifty.model.response.EndDribblingActivityResponse; import com._94fifty.model.response.EndRawStreamResponse; import com._94fifty.model.response.EndShootingActivityResponse; import com._94fifty.model.response.StartDribblingActivityResponse; import com._94fifty.model.response.StartRawStreamResponse; import com._94fifty.model.response.StartShootingActivityResponse; import com._94fifty.model.response.notification.AbstractNotification; import com._94fifty.model.response.notification.DribblingActivityRecordNotification; import com._94fifty.model.response.notification.RawDataNotification; import com._94fifty.model.response.notification.ShootingActivityRecordNotification; import com._94fifty.model.type.ActivityLimitBasis; import com._94fifty.model.type.ConnectionState; import com._94fifty.model.type.InvocationType; import com._94fifty.model.type.NotificationTrigger; import com._94fifty.model.type.RequestStatus; /** * Created by xubinggui on 7/20/15. 
*/ public class BasketDataDelegate implements DeviceBridge.Delegate { private final static String TAG = BasketDataDelegate.class.getCanonicalName(); private DeviceBridge mDeviceBridge; private ConnectionState mCurrentConnectionState; private BasketballDataNotificationListener mListener; public BasketDataDelegate(BluetoothSocket socket) { BluetoothDeviceBridgeFactory factory = new BluetoothDeviceBridgeFactory(); mDeviceBridge = factory.create(socket, this); mDeviceBridge.addListener(this); } @Override public void onConnectionStateChanged(ConnectionState connectionState) { Log.d(TAG, "onConnectionStateChanged " ); mCurrentConnectionState = connectionState; //if(connectionState == ConnectionState.Open){ // Log.d(TAG, "onConnectionStateChanged open "); // mDeviceBridge.executeRequest(new StartRawStreamRequest()); //} } @Override public void onNotification(AbstractNotification abstractNotification) { Log.d(TAG, "onNotification" + abstractNotification); //for(int i =0;i<notification.getRawData().length;i++){ // if(notification.getRawData()[i] != 0) // Log.d(TAG,"onNotification " + notification.getRawData()[i]); //} if(abstractNotification.getType() == InvocationType.DribblingActivityRecord) { DribblingActivityRecordNotification notification = (DribblingActivityRecordNotification) abstractNotification; Log.d(TAG, "DribblingActivityRecordNotification onNotification " + notification.getRecord().getTotalDribbles()); mListener.dribblingActivityRecord(notification); }else if(abstractNotification.getType() == InvocationType.ShootingActivityRecord){ ShootingActivityRecordNotification notification = (ShootingActivityRecordNotification) abstractNotification; Log.d(TAG, "ShootingActivityRecordNotification onNotification " + notification.getRecord().getAverageShotSpin()); }else if(abstractNotification.getType() == InvocationType.RawData){ RawDataNotification notification = (RawDataNotification) abstractNotification; String readMessage = 
Byte2Hex.convert2byte(notification.getRawData()); FileUtil.saveData(readMessage); } } @Override public void onResponse(AbstractResponse abstractResponse) { Log.d(TAG, "onResponse " + abstractResponse.toString()); abstractResponse. } @Override public void onResponseError(byte[] bytes, String s) { Log.d(TAG, "onResponseError " + s); } public ConnectionState getConnectionState(){ return mCurrentConnectionState; } public RequestStatus sendRequest(AbstractRequest request){ if(mCurrentConnectionState == ConnectionState.Open) { return mDeviceBridge.executeRequest(request); } return RequestStatus.Error; } public void setBasketballDataMotificationListener(BasketballDataNotificationListener listener){ mListener = listener; } public void startDribblingActivity(){ DeviceFacade.startDribblingActivity(mDeviceBridge, ActivityLimitBasis.Time, NotificationTrigger.Time, 600000, 200, 5, 1, new DeviceResponseCallback<StartDribblingActivityResponse>() { @Override protected void onResponse(StartDribblingActivityResponse response) { Log.d(TAG, "startDribblingActivity response " + response.getStatus().isOK()); } }); } public void endDribblingActivity(){ DeviceFacade.endDribblingActivity(mDeviceBridge, new DeviceResponseCallback<EndDribblingActivityResponse>() { @Override protected void onResponse(EndDribblingActivityResponse response) { Log.d(TAG, "endDribblingActivity onResponse " + response.getStatus().isOK()); } }); } public void startShootingActivity(){ DeviceFacade.startShootingActivity(mDeviceBridge, ActivityLimitBasis.Event, NotificationTrigger.Event, 20, 1, 1, new DeviceResponseCallback<StartShootingActivityResponse>() { @Override protected void onResponse(StartShootingActivityResponse response) { Log.d(TAG, "startShootingActivity response " + response.getStatus().isOK()); } }); } public void endShootingActivity(){ DeviceFacade.endShootingActivity(mDeviceBridge, new DeviceResponseCallback<EndShootingActivityResponse>() { @Override protected void 
onResponse(EndShootingActivityResponse response) { Log.d(TAG, "endShootingActivity onResponse " + response.getStatus().isOK()); } }); } public void startRawStream(){ DeviceFacade.startRawStream(mDeviceBridge, new DeviceResponseCallback<StartRawStreamResponse>() { @Override protected void onResponse(StartRawStreamResponse response) { Log.d(TAG, "startRawStream response " + response.getStatus().isOK()); } }); } public void endRawStream(){ DeviceFacade.endRawStream(mDeviceBridge, new DeviceResponseCallback<EndRawStreamResponse>() { @Override protected void onResponse(EndRawStreamResponse response) { Log.d(TAG, "endRawStream onResponse " + response.getStatus().isOK()); } }); } }
update
BluetoothChat/Application/src/main/java/com/example/android/bluetoothchat/BasketDataDelegate.java
update
Java
mit
679c019a768c76a021244f2051b70c6540cf0c4a
0
jenkinsci/tasks-plugin,amuniz/tasks-plugin,fargies/jenkins-valgrind-plugin,fargies/jenkins-valgrind-plugin,amuniz/tasks-plugin,jenkinsci/tasks-plugin,fargies/jenkins-valgrind-plugin,jenkinsci/tasks-plugin,amuniz/tasks-plugin
package hudson.plugins.tasks.util; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Action; import java.io.IOException; import java.util.List; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletResponse; import org.kohsuke.stapler.Ancestor; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import edu.umd.cs.findbugs.annotations.SuppressWarnings; /** * A project action displays a link on the side panel of a project. * * @param <T> * result action type * @author Ulli Hafner */ public abstract class AbstractProjectAction<T extends ResultAction<?>> implements Action { /** One year (in seconds). */ private static final int ONE_YEAR = 60 * 60 * 24 * 365; /** Project that owns this action. */ @SuppressWarnings("Se") private final AbstractProject<?, ?> project; /** The type of the result action. */ private final Class<T> resultActionType; /** The icon URL of this action: it will be shown as soon as a result is available. */ private final String iconUrl; /** URL to the results of the last build. */ private final String resultsUrl; /** * Creates a new instance of <code>AbstractProjectAction</code>. * * @param project * the project that owns this action * @param resultActionType * the type of the result action * @param iconUrl * the icon URL of this action: it will be shown as soon as a * result is available. * @param pluginName * the plug-in name */ public AbstractProjectAction(final AbstractProject<?, ?> project, final Class<T> resultActionType, final String iconUrl, final String pluginName) { this.project = project; this.resultActionType = resultActionType; this.iconUrl = iconUrl; this.resultsUrl = "../lastBuild/" + pluginName + "Result"; } /** * Returns whether we should display the toggle graph type links. 
* * @return <code>true</code> if we should display the toggle graph type * links */ public final boolean isHealthinessEnabled() { ResultAction<?> lastAction = getLastAction(); if (lastAction != null) { return lastAction.getHealthReportBuilder().isEnabled(); } return false; } /** * Returns the project. * * @return the project */ public final AbstractProject<?, ?> getProject() { return project; } /** * Returns whether we have enough valid results in order to draw a * meaningful graph. * * @param build * the build to look backward from * @return <code>true</code> if the results are valid in order to draw a * graph */ public final boolean hasValidResults(final AbstractBuild<?, ?> build) { if (build != null) { ResultAction<?> resultAction = build.getAction(resultActionType); if (resultAction != null) { return resultAction.hasPreviousResultAction(); } } return false; } /** * Returns the icon URL for the side-panel in the project screen. If there * is yet no valid result, then <code>null</code> is returned. * * @return the icon URL for the side-panel in the project screen */ public String getIconFileName() { if (getLastAction() != null) { return iconUrl; } return null; } /** * Returns the last valid result action. * * @return the last valid result action, or <code>null</code> if no such action is found */ public ResultAction<?> getLastAction() { AbstractBuild<?, ?> lastBuild = project.getLastBuild(); if (lastBuild != null) { if (lastBuild.isBuilding()) { lastBuild = lastBuild.getPreviousBuild(); } if (lastBuild != null) { return lastBuild.getAction(resultActionType); } } return null; } /** * Display the trend graph. Delegates to the the associated * {@link ResultAction}. 
* * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error in * {@link ResultAction#doGraph(StaplerRequest, StaplerResponse)} */ public void doTrend(final StaplerRequest request, final StaplerResponse response) throws IOException { createGraph(request, response); } /** * Creates a trend graph or map. * * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error in * {@link ResultAction#doGraph(StaplerRequest, StaplerResponse)} */ private void createGraph(final StaplerRequest request, final StaplerResponse response) throws IOException { ResultAction<?> action = getLastAction(); if (action == null) { response.setStatus(HttpServletResponse.SC_NOT_FOUND); } else { action.doGraph(request, response); } } /** * Display the trend map. Delegates to the the associated * {@link ResultAction}. * * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error */ public void doTrendMap(final StaplerRequest request, final StaplerResponse response) throws IOException { ResultAction<?> action = getLastAction(); if (action == null) { response.setStatus(HttpServletResponse.SC_NOT_FOUND); } else { action.doGraphMap(request, response); } } /** * * Redirects the index page to the last result. * * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error */ public void doIndex(final StaplerRequest request, final StaplerResponse response) throws IOException { response.sendRedirect2(resultsUrl); } /** * Changes the trend graph display mode. 
* * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error */ public void doFlipTrend(final StaplerRequest request, final StaplerResponse response) throws IOException { boolean useHealthBuilder = true; Cookie[] cookies = request.getCookies(); if (cookies != null) { for (Cookie cookie : cookies) { if (cookie.getName().equals(getCookieName())) { useHealthBuilder = Boolean.parseBoolean(cookie.getValue()); } } } useHealthBuilder = !useHealthBuilder; Cookie cookie = new Cookie(getCookieName(), String.valueOf(useHealthBuilder)); List<?> ancestors = request.getAncestors(); Ancestor ancestor = (Ancestor) ancestors.get(ancestors.size() - 2); cookie.setPath(ancestor.getUrl()); cookie.setMaxAge(ONE_YEAR); response.addCookie(cookie); response.sendRedirect(".."); } /** * Returns the flip trend cookie name. * * @return the flip trend cookie name. */ protected abstract String getCookieName(); }
src/main/java/hudson/plugins/tasks/util/AbstractProjectAction.java
package hudson.plugins.tasks.util; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Action; import java.io.IOException; import java.util.List; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletResponse; import org.kohsuke.stapler.Ancestor; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import edu.umd.cs.findbugs.annotations.SuppressWarnings; /** * A project action displays a link on the side panel of a project. * * @param <T> * result action type * @author Ulli Hafner */ public abstract class AbstractProjectAction<T extends ResultAction<?>> implements Action { /** One year (in seconds). */ private static final int ONE_YEAR = 60 * 60 * 24 * 365; /** Project that owns this action. */ @SuppressWarnings("Se") private final AbstractProject<?, ?> project; /** The type of the result action. */ private final Class<T> resultActionType; /** The icon URL of this action: it will be shown as soon as a result is available. */ private final String iconUrl; /** URL to the results of the last build. */ private final String resultsUrl; /** * Creates a new instance of <code>AbstractProjectAction</code>. * * @param project * the project that owns this action * @param resultActionType * the type of the result action * @param iconUrl * the icon URL of this action: it will be shown as soon as a * result is available. * @param pluginName * the plug-in name */ public AbstractProjectAction(final AbstractProject<?, ?> project, final Class<T> resultActionType, final String iconUrl, final String pluginName) { this.project = project; this.resultActionType = resultActionType; this.iconUrl = iconUrl; this.resultsUrl = "../lastBuild/" + pluginName + "Result"; } /** * Returns whether we should display the toggle graph type links. 
* * @return <code>true</code> if we should display the toggle graph type * links */ public final boolean isHealthinessEnabled() { ResultAction<?> lastAction = getLastAction(); if (lastAction != null) { return lastAction.getHealthReportBuilder().isEnabled(); } return false; } /** * Returns the project. * * @return the project */ public final AbstractProject<?, ?> getProject() { return project; } /** * Returns whether we have enough valid results in order to draw a * meaningful graph. * * @param build * the build to look backward from * @return <code>true</code> if the results are valid in order to draw a * graph */ public final boolean hasValidResults(final AbstractBuild<?, ?> build) { if (build != null) { ResultAction<?> resultAction = build.getAction(resultActionType); if (resultAction != null) { return resultAction.hasPreviousResultAction(); } } return false; } /** * Returns the icon URL for the side-panel in the project screen. If there * is yet no valid result, then <code>null</code> is returned. * * @return the icon URL for the side-panel in the project screen */ public String getIconFileName() { if (getLastAction() != null) { return iconUrl; } return null; } /** * Returns the last valid result action. * * @return the last valid result action, or <code>null</code> if no such action is found */ public ResultAction<?> getLastAction() { AbstractBuild<?, ?> lastBuild = project.getLastBuild(); if (lastBuild != null) { return lastBuild.getAction(resultActionType); } return null; } /** * Display the trend graph. Delegates to the the associated * {@link ResultAction}. * * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error in * {@link ResultAction#doGraph(StaplerRequest, StaplerResponse)} */ public void doTrend(final StaplerRequest request, final StaplerResponse response) throws IOException { createGraph(request, response); } /** * Creates a trend graph or map. 
* * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error in * {@link ResultAction#doGraph(StaplerRequest, StaplerResponse)} */ private void createGraph(final StaplerRequest request, final StaplerResponse response) throws IOException { ResultAction<?> action = getLastAction(); if (action == null) { response.setStatus(HttpServletResponse.SC_NOT_FOUND); } else { action.doGraph(request, response); } } /** * Display the trend map. Delegates to the the associated * {@link ResultAction}. * * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error */ public void doTrendMap(final StaplerRequest request, final StaplerResponse response) throws IOException { ResultAction<?> action = getLastAction(); if (action == null) { response.setStatus(HttpServletResponse.SC_NOT_FOUND); } else { action.doGraphMap(request, response); } } /** * * Redirects the index page to the last result. * * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error */ public void doIndex(final StaplerRequest request, final StaplerResponse response) throws IOException { response.sendRedirect2(resultsUrl); } /** * Changes the trend graph display mode. 
* * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error */ public void doFlipTrend(final StaplerRequest request, final StaplerResponse response) throws IOException { boolean useHealthBuilder = true; Cookie[] cookies = request.getCookies(); if (cookies != null) { for (Cookie cookie : cookies) { if (cookie.getName().equals(getCookieName())) { useHealthBuilder = Boolean.parseBoolean(cookie.getValue()); } } } useHealthBuilder = !useHealthBuilder; Cookie cookie = new Cookie(getCookieName(), String.valueOf(useHealthBuilder)); List<?> ancestors = request.getAncestors(); Ancestor ancestor = (Ancestor) ancestors.get(ancestors.size() - 2); cookie.setPath(ancestor.getUrl()); cookie.setMaxAge(ONE_YEAR); response.addCookie(cookie); response.sendRedirect(".."); } /** * Returns the flip trend cookie name. * * @return the flip trend cookie name. */ protected abstract String getCookieName(); }
1468: Fixed. Don't use a build for trend reporting, if the build is still not finished.
src/main/java/hudson/plugins/tasks/util/AbstractProjectAction.java
1468: Fixed. Don't use a build for trend reporting, if the build is still not finished.
Java
mit
d15972bb49b10691ee3b878ab0dc699d397cf557
0
domisum/AuxiliumLib
package de.domisum.lib.auxilium.util.ticker; import de.domisum.lib.auxilium.display.DurationDisplay; import de.domisum.lib.auxilium.util.java.ThreadUtil; import de.domisum.lib.auxilium.util.java.annotations.API; import de.domisum.lib.auxilium.util.time.DurationUtil; import lombok.Getter; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.time.Duration; import java.time.Instant; @API public abstract class Ticker { protected final Logger logger = LoggerFactory.getLogger(getClass()); // CONSTANTS private static final Duration TIMEOUT_DEFAULT = null; // SETTINGS @Getter private final String name; private final Duration interval; @Nullable private final Duration timeout; // STATUS private Thread tickThread; private Thread watchdogThread; private Status status = Status.READY; private Instant lastTickStart; // INIT @API protected Ticker(String name, Duration interval, @Nullable Duration timeout) { Validate.notNull(name, "name can't be null"); Validate.notNull(interval, "interval can't be null"); Validate.isTrue(interval.compareTo(Duration.ZERO) > 0, "interval has to be greater than zero"); this.name = name; this.interval = interval; this.timeout = timeout; } @API protected Ticker(String name, Duration interval) { this(name, interval, TIMEOUT_DEFAULT); } // GETTERS @API public synchronized boolean isRunning() { return status == Status.RUNNING; } @API public Thread getTickThread() { if(status != Status.RUNNING) throw new IllegalStateException("can't get tick thread while not running"); return tickThread; } // CONTROL @API public synchronized void start() { if(status != Status.READY) throw new IllegalStateException("Can't start ticker with status "+status); logger.info("Starting ticker {}...", name); status = Status.RUNNING; startThreads(); } private void startThreads() { tickThread = ThreadUtil.createAndStartThread(this::run, name); watchdogThread = 
ThreadUtil.createAndStartDaemonThread(this::watchdogRun, "watchDog-"+name); } @API public synchronized void stopAndWaitForCompletion() { stop(true); } @API public synchronized void stopAndIgnoreCompletion() { stop(false); } protected synchronized void stop(boolean waitForCompletion) { if(status != Status.RUNNING) return; logger.info("Stopping ticker {} (Waiting for completion: {})...", name, waitForCompletion); status = Status.STOPPED; watchdogThread.interrupt(); if(waitForCompletion && (Thread.currentThread() != tickThread)) { ThreadUtil.join(tickThread); logger.info("Ticker {} completed", name); } } // TICK private void run() { while(status == Status.RUNNING) { lastTickStart = Instant.now(); tickCaught(); lastTickStart = null; if(status == Status.RUNNING) ThreadUtil.sleep(interval); } } private void tickCaught() { try { tick(); } catch(RuntimeException e) { logger.error("Exception occured during tick", e); } } protected abstract void tick(); // WATCHDOG private void watchdogRun() { while(!Thread.interrupted()) { watchdogTick(); ThreadUtil.sleep(Duration.ofSeconds(1)); } } private void watchdogTick() { if(timeout == null) return; // get local reference to avoid impact of changes in variable during run of method Instant lastTickStart = this.lastTickStart; if(lastTickStart == null) return; if(DurationUtil.isOlderThan(lastTickStart, timeout)) timeout(timeout); } private void timeout(Duration timeout) { logger.error( "Ticker {} timed out (after {}). Current stacktrace:\n{}", name, DurationDisplay.of(timeout), ThreadUtil.getThreadToString(tickThread) ); tickThread.interrupt(); ThreadUtil.tryKill(tickThread); watchdogThread.interrupt(); lastTickStart = null; if(status == Status.RUNNING) { logger.info("Starting ticker {} back up...", name); startThreads(); } } // STATUS private enum Status { READY, RUNNING, STOPPED } }
src/main/java/de/domisum/lib/auxilium/util/ticker/Ticker.java
package de.domisum.lib.auxilium.util.ticker; import de.domisum.lib.auxilium.display.DurationDisplay; import de.domisum.lib.auxilium.util.java.ThreadUtil; import de.domisum.lib.auxilium.util.java.annotations.API; import de.domisum.lib.auxilium.util.time.DurationUtil; import lombok.Getter; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.time.Duration; import java.time.Instant; @API public abstract class Ticker { protected final Logger logger = LoggerFactory.getLogger(getClass()); // CONSTANTS private static final Duration TIMEOUT_DEFAULT = null; // SETTINGS @Getter private final String name; private final Duration interval; @Nullable private final Duration timeout; // STATUS private Thread tickThread; private Thread watchdogThread; private Status status = Status.READY; private Instant lastTickStart; // INIT @API protected Ticker(String name, Duration interval, @Nullable Duration timeout) { Validate.notNull(name, "name can't be null"); Validate.notNull(interval, "interval can't be null"); Validate.isTrue(interval.compareTo(Duration.ZERO) > 0, "interval has to be greater than zero"); this.name = name; this.interval = interval; this.timeout = timeout; } @API protected Ticker(String name, Duration interval) { this(name, interval, TIMEOUT_DEFAULT); } // GETTERS @API public synchronized boolean isRunning() { return status == Status.RUNNING; } @API public Thread getTickThread() { if(status != Status.RUNNING) throw new IllegalStateException("can't get tick thread while not running"); return tickThread; } // CONTROL @API public synchronized void start() { if(status != Status.READY) throw new IllegalStateException("Can't start ticker with status "+status); logger.info("Starting ticker {}...", name); status = Status.RUNNING; startThreads(); } private void startThreads() { tickThread = ThreadUtil.createAndStartThread(this::run, name); watchdogThread = 
ThreadUtil.createAndStartDaemonThread(this::watchdogRun, "watchDog-"+name); } @API public synchronized void stopAndWaitForCompletion() { stop(true); } @API public synchronized void stopAndIgnoreCompletion() { stop(false); } protected synchronized void stop(boolean waitForCompletion) { if(status == Status.STOPPED) return; if(status != Status.RUNNING) throw new IllegalStateException("Can't stop ticker with status "+status); logger.info("Stopping ticker {} (Waiting for completion: {})...", name, waitForCompletion); status = Status.STOPPED; watchdogThread.interrupt(); if(waitForCompletion && (Thread.currentThread() != tickThread)) { ThreadUtil.join(tickThread); logger.info("Ticker {} completed", name); } } // TICK private void run() { while(status == Status.RUNNING) { lastTickStart = Instant.now(); tickCaught(); lastTickStart = null; if(status == Status.RUNNING) ThreadUtil.sleep(interval); } } private void tickCaught() { try { tick(); } catch(RuntimeException e) { logger.error("Exception occured during tick", e); } } protected abstract void tick(); // WATCHDOG private void watchdogRun() { while(!Thread.interrupted()) { watchdogTick(); ThreadUtil.sleep(Duration.ofSeconds(1)); } } private void watchdogTick() { if(timeout == null) return; // get local reference to avoid impact of changes in variable during run of method Instant lastTickStart = this.lastTickStart; if(lastTickStart == null) return; if(DurationUtil.isOlderThan(lastTickStart, timeout)) timeout(timeout); } private void timeout(Duration timeout) { logger.error( "Ticker {} timed out (after {}). Current stacktrace:\n{}", name, DurationDisplay.of(timeout), ThreadUtil.getThreadToString(tickThread) ); tickThread.interrupt(); ThreadUtil.tryKill(tickThread); watchdogThread.interrupt(); lastTickStart = null; if(status == Status.RUNNING) { logger.info("Starting ticker {} back up...", name); startThreads(); } } // STATUS private enum Status { READY, RUNNING, STOPPED } }
Removed ticker stop error message
src/main/java/de/domisum/lib/auxilium/util/ticker/Ticker.java
Removed ticker stop error message
Java
mit
5a5295b7cc7835e747b0b4a0269c0bb03ee2e598
0
Eddyosos/integracao201701
package com.github.Eddyosos.integracao20171.esus.cds.consumoalimentar; import br.gov.saude.esus.cds.transport.generated.thrift.common.UnicaLotacaoHeaderThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.FichaConsumoAlimentarThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.PerguntaQuestionarioCriancasComMaisDoisAnosThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.PerguntaQuestionarioCriancasDeSeisVinteTresMesesThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.PerguntaQuestionarioCriancasMenoresSeisMesesThrift; import com.github.Eddyosos.integracao20171.esus.cds.common.UnicaLotacaoHeader; import java.util.Iterator; import java.util.LinkedList; import java.util.List; public class FichaConsumoAlimentar { private FichaConsumoAlimentarThrift instance = new FichaConsumoAlimentarThrift(); protected FichaConsumoAlimentar(FichaConsumoAlimentarThrift instance){ this.instance = instance; } public FichaConsumoAlimentar deepCopy() { return new FichaConsumoAlimentar(instance.deepCopy()); } public UnicaLotacaoHeader getHeaderTransport() { return new UnicaLotacaoHeader(instance.getHeaderTransport()); } public void unsetHeaderTransport() { instance.unsetHeaderTransport(); } public boolean isSetHeaderTransport() { return instance.isSetHeaderTransport(); } public void setHeaderTransportIsSet(boolean value) { instance.setHeaderTransportIsSet(value); } public String getNumeroCartaoSus() { return instance.getNumeroCartaoSus(); } public void setNumeroCartaoSus(String numeroCartaoSus) { instance.setNumeroCartaoSus(numeroCartaoSus); } public void unsetNumeroCartaoSus() { instance.unsetNumeroCartaoSus(); } public void setNumeroCartaoSusIsSet(boolean value) { instance.setNumeroCartaoSusIsSet(value); } public String getIdentificacaoUsuario() { return instance.getIdentificacaoUsuario(); } public void setIdentificacaoUsuario(String identificacaoUsuario) { 
instance.setIdentificacaoUsuario(identificacaoUsuario); } public void unsetIdentificacaoUsuario() { instance.unsetIdentificacaoUsuario(); } public boolean isSetIdentificacaoUsuario() { return instance.isSetIdentificacaoUsuario(); } public void setIdentificacaoUsuarioIsSet(boolean value) { instance.setIdentificacaoUsuarioIsSet(value); } public long getDataNascimento() { return instance.getDataNascimento(); } public void setDataNascimento(long dataNascimento) { instance.setDataNascimento(dataNascimento); } public void unsetDataNascimento() { instance.unsetDataNascimento(); } public boolean isSetDataNascimento() { return instance.isSetDataNascimento(); } public void setDataNascimentoIsSet(boolean value) { instance.setDataNascimentoIsSet(value); } public long getSexo() { return instance.getSexo(); } public void setSexo(long sexo) { instance.setSexo(sexo); } public void unsetSexo() { instance.unsetSexo(); } public boolean isSetSexo() { return instance.isSetSexo(); } public void setSexoIsSet(boolean value) { instance.setSexoIsSet(value); } public long getLocalAtendimento() { return instance.getLocalAtendimento(); } public void setLocalAtendimento(long localAtendimento) { instance.setLocalAtendimento(localAtendimento); } public void unsetLocalAtendimento() { instance.unsetLocalAtendimento(); } public boolean isSetLocalAtendimento() { return instance.isSetLocalAtendimento(); } public void setLocalAtendimentoIsSet(boolean value) { instance.setLocalAtendimentoIsSet(value); } public int getPerguntasQuestionarioCriancasMenoresSeisMesesSize() { return instance.getPerguntasQuestionarioCriancasMenoresSeisMesesSize(); } public void addToPerguntasQuestionarioCriancasMenoresSeisMeses(PerguntaQuestionarioCriancasMenoresSeisMeses elem) { instance.addToPerguntasQuestionarioCriancasMenoresSeisMeses(elem.getInstance()); } public List<PerguntaQuestionarioCriancasMenoresSeisMeses> getPerguntasQuestionarioCriancasMenoresSeisMeses() { List <PerguntaQuestionarioCriancasMenoresSeisMeses> 
listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasMenoresSeisMeses().forEach((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasMenoresSeisMeses (t)); }); return listaPerguntaQuestionario; } public void setPerguntasQuestionarioCriancasMenoresSeisMeses(List<PerguntaQuestionarioCriancasMenoresSeisMeses> perguntasQuestionarioCriancasMenoresSeisMeses) { List<PerguntaQuestionarioCriancasMenoresSeisMesesThrift> listaQuestionario = new LinkedList<>(); perguntasQuestionarioCriancasMenoresSeisMeses.forEach((elem) -> { listaQuestionario.add(elem.getInstance()); }); instance.setPerguntasQuestionarioCriancasMenoresSeisMeses(listaQuestionario); } public void unsetPerguntasQuestionarioCriancasMenoresSeisMeses() { instance.unsetPerguntasQuestionarioCriancasMenoresSeisMeses(); } public boolean isSetPerguntasQuestionarioCriancasMenoresSeisMeses() { return instance.isSetPerguntasQuestionarioCriancasMenoresSeisMeses(); } public void setPerguntasQuestionarioCriancasMenoresSeisMesesIsSet(boolean value) { instance.setPerguntasQuestionarioCriancasMenoresSeisMesesIsSet(value); } public int getPerguntasQuestionarioCriancasDeSeisVinteTresMesesSize() { return instance.getPerguntasQuestionarioCriancasDeSeisVinteTresMesesSize(); } public Iterator<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> getPerguntasQuestionarioCriancasDeSeisVinteTresMesesIterator() { List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasDeSeisVinteTresMesesIterator().forEachRemaining((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasDeSeisVinteTresMeses (t)); }); return listaPerguntaQuestionario.iterator(); } public void addToPerguntasQuestionarioCriancasDeSeisVinteTresMeses(PerguntaQuestionarioCriancasDeSeisVinteTresMeses elem) { instance.addToPerguntasQuestionarioCriancasDeSeisVinteTresMeses(elem.getInstance()); } public 
List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> getPerguntasQuestionarioCriancasDeSeisVinteTresMeses() { List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasDeSeisVinteTresMeses().forEach((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasDeSeisVinteTresMeses(t)); }); return listaPerguntaQuestionario; } public void setPerguntasQuestionarioCriancasDeSeisVinteTresMeses(List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> perguntasQuestionarioCriancasDeSeisVinteTresMeses) { List<PerguntaQuestionarioCriancasDeSeisVinteTresMesesThrift>listaQuestionario = new LinkedList<>(); perguntasQuestionarioCriancasDeSeisVinteTresMeses.forEach((elem)->{ listaQuestionario.add(elem.getInstance()); }); instance.setPerguntasQuestionarioCriancasDeSeisVinteTresMeses(listaQuestionario); } public void unsetPerguntasQuestionarioCriancasDeSeisVinteTresMeses() { instance.unsetPerguntasQuestionarioCriancasDeSeisVinteTresMeses(); } public boolean isSetPerguntasQuestionarioCriancasDeSeisVinteTresMeses() { return instance.isSetPerguntasQuestionarioCriancasDeSeisVinteTresMeses(); } public void setPerguntasQuestionarioCriancasDeSeisVinteTresMesesIsSet(boolean value) { instance.setPerguntasQuestionarioCriancasDeSeisVinteTresMesesIsSet(value); } public int getPerguntasQuestionarioCriancasComMaisDoisAnosSize() { return instance.getPerguntasQuestionarioCriancasComMaisDoisAnosSize(); } public Iterator<PerguntaQuestionarioCriancasComMaisDoisAnos> getPerguntasQuestionarioCriancasComMaisDoisAnosIterator() { List<PerguntaQuestionarioCriancasComMaisDoisAnos> listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasComMaisDoisAnosIterator().forEachRemaining((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasComMaisDoisAnos(t)); }); return listaPerguntaQuestionario.iterator(); } public void 
addToPerguntasQuestionarioCriancasComMaisDoisAnos(PerguntaQuestionarioCriancasComMaisDoisAnos elem) { instance.addToPerguntasQuestionarioCriancasComMaisDoisAnos(elem.getInstance()); } public List<PerguntaQuestionarioCriancasComMaisDoisAnos> getPerguntasQuestionarioCriancasComMaisDoisAnos() { List<PerguntaQuestionarioCriancasComMaisDoisAnos> listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasComMaisDoisAnos().forEach((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasComMaisDoisAnos(t)); }); return listaPerguntaQuestionario; } public void setPerguntasQuestionarioCriancasComMaisDoisAnos(List<PerguntaQuestionarioCriancasComMaisDoisAnos> perguntasQuestionarioCriancasComMaisDoisAnos) { List<PerguntaQuestionarioCriancasComMaisDoisAnosThrift> listaPerguntaQuestionario = new LinkedList<>(); perguntasQuestionarioCriancasComMaisDoisAnos.forEach((elem)->{ listaPerguntaQuestionario.add(elem.getInstance()); }); instance.setPerguntasQuestionarioCriancasComMaisDoisAnos(listaPerguntaQuestionario); } public void unsetPerguntasQuestionarioCriancasComMaisDoisAnos() { instance.unsetPerguntasQuestionarioCriancasComMaisDoisAnos(); } public boolean isSetPerguntasQuestionarioCriancasComMaisDoisAnos() { return instance.isSetPerguntasQuestionarioCriancasComMaisDoisAnos(); } public void setPerguntasQuestionarioCriancasComMaisDoisAnosIsSet(boolean value) { instance.setPerguntasQuestionarioCriancasComMaisDoisAnosIsSet(value); } public String getUuidFicha() { return instance.getUuidFicha(); } public void setUuidFicha(String uuidFicha) { instance.setUuidFicha(uuidFicha); } public void unsetUuidFicha() { instance.unsetUuidFicha(); } public boolean isSetUuidFicha() { return instance.isSetUuidFicha(); } public void setUuidFichaIsSet(boolean value) { instance.setUuidFichaIsSet(value); } public int getTpCdsOrigem() { return instance.getTpCdsOrigem(); } public void setTpCdsOrigem(int tpCdsOrigem) { instance.setTpCdsOrigem(tpCdsOrigem); } public 
void unsetTpCdsOrigem() { instance.unsetTpCdsOrigem(); } public boolean isSetTpCdsOrigem() { return instance.isSetTpCdsOrigem(); } public void setTpCdsOrigemIsSet(boolean value) { instance.setTpCdsOrigemIsSet(value); } public boolean equals(FichaConsumoAlimentar that) { return instance.equals(that.instance); } public int compareTo(FichaConsumoAlimentar other) { return instance.compareTo(other.instance); } }
src/main/java/com/github/Eddyosos/integracao20171/esus/cds/consumoalimentar/FichaConsumoAlimentar.java
package com.github.Eddyosos.integracao20171.esus.cds.consumoalimentar; import br.gov.saude.esus.cds.transport.generated.thrift.common.UnicaLotacaoHeaderThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.FichaConsumoAlimentarThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.PerguntaQuestionarioCriancasComMaisDoisAnosThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.PerguntaQuestionarioCriancasDeSeisVinteTresMesesThrift; import br.gov.saude.esus.cds.transport.generated.thrift.consumoalimentar.PerguntaQuestionarioCriancasMenoresSeisMesesThrift; import com.github.Eddyosos.integracao20171.esus.cds.common.UnicaLotacaoHeader; import java.util.Iterator; import java.util.LinkedList; import java.util.List; public class FichaConsumoAlimentar { private FichaConsumoAlimentarThrift instance = new FichaConsumoAlimentarThrift(); protected FichaConsumoAlimentar(FichaConsumoAlimentarThrift instance){ this.instance = instance; } public FichaConsumoAlimentar deepCopy() { return new FichaConsumoAlimentar(instance.deepCopy()); } public UnicaLotacaoHeader getHeaderTransport() { return new UnicaLotacaoHeader(instance.getHeaderTransport()); } public void unsetHeaderTransport() { instance.unsetHeaderTransport(); } public boolean isSetHeaderTransport() { return instance.isSetHeaderTransport(); } public void setHeaderTransportIsSet(boolean value) { instance.setHeaderTransportIsSet(value); } public String getNumeroCartaoSus() { return instance.getNumeroCartaoSus(); } public void setNumeroCartaoSus(String numeroCartaoSus) { instance.setNumeroCartaoSus(numeroCartaoSus); } public void unsetNumeroCartaoSus() { instance.unsetNumeroCartaoSus(); } public void setNumeroCartaoSusIsSet(boolean value) { instance.setNumeroCartaoSusIsSet(value); } public String getIdentificacaoUsuario() { return instance.getIdentificacaoUsuario(); } public void setIdentificacaoUsuario(String identificacaoUsuario) { 
instance.setIdentificacaoUsuario(identificacaoUsuario); } public void unsetIdentificacaoUsuario() { instance.unsetIdentificacaoUsuario(); } public boolean isSetIdentificacaoUsuario() { return instance.isSetIdentificacaoUsuario(); } public void setIdentificacaoUsuarioIsSet(boolean value) { instance.setIdentificacaoUsuarioIsSet(value); } public long getDataNascimento() { return instance.getDataNascimento(); } public void setDataNascimento(long dataNascimento) { instance.setDataNascimento(dataNascimento); } public void unsetDataNascimento() { instance.unsetDataNascimento(); } public boolean isSetDataNascimento() { return instance.isSetDataNascimento(); } public void setDataNascimentoIsSet(boolean value) { instance.setDataNascimentoIsSet(value); } public long getSexo() { return instance.getSexo(); } public void setSexo(long sexo) { instance.setSexo(sexo); } public void unsetSexo() { instance.unsetSexo(); } public boolean isSetSexo() { return instance.isSetSexo(); } public void setSexoIsSet(boolean value) { instance.setSexoIsSet(value); } public long getLocalAtendimento() { return instance.getLocalAtendimento(); } public void setLocalAtendimento(long localAtendimento) { instance.setLocalAtendimento(localAtendimento); } public void unsetLocalAtendimento() { instance.unsetLocalAtendimento(); } public boolean isSetLocalAtendimento() { return instance.isSetLocalAtendimento(); } public void setLocalAtendimentoIsSet(boolean value) { instance.setLocalAtendimentoIsSet(value); } public int getPerguntasQuestionarioCriancasMenoresSeisMesesSize() { return instance.getPerguntasQuestionarioCriancasMenoresSeisMesesSize(); } public void addToPerguntasQuestionarioCriancasMenoresSeisMeses(PerguntaQuestionarioCriancasMenoresSeisMeses elem) { instance.addToPerguntasQuestionarioCriancasMenoresSeisMeses(elem.getInstance()); } public List<PerguntaQuestionarioCriancasMenoresSeisMeses> getPerguntasQuestionarioCriancasMenoresSeisMeses() { List <PerguntaQuestionarioCriancasMenoresSeisMeses> 
listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasMenoresSeisMeses().forEach((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasMenoresSeisMeses (t)); }); return listaPerguntaQuestionario; } public void setPerguntasQuestionarioCriancasMenoresSeisMeses(List<PerguntaQuestionarioCriancasMenoresSeisMeses> perguntasQuestionarioCriancasMenoresSeisMeses) { List<PerguntaQuestionarioCriancasMenoresSeisMesesThrift> listaQuestionario = new LinkedList<>(); perguntasQuestionarioCriancasMenoresSeisMeses.forEach((elem) -> { listaQuestionario.add(elem.getInstance()); }); instance.setPerguntasQuestionarioCriancasMenoresSeisMeses(listaQuestionario); } public void unsetPerguntasQuestionarioCriancasMenoresSeisMeses() { instance.unsetPerguntasQuestionarioCriancasMenoresSeisMeses(); } public boolean isSetPerguntasQuestionarioCriancasMenoresSeisMeses() { return instance.isSetPerguntasQuestionarioCriancasMenoresSeisMeses(); } public void setPerguntasQuestionarioCriancasMenoresSeisMesesIsSet(boolean value) { instance.setPerguntasQuestionarioCriancasMenoresSeisMesesIsSet(value); } public int getPerguntasQuestionarioCriancasDeSeisVinteTresMesesSize() { return instance.getPerguntasQuestionarioCriancasDeSeisVinteTresMesesSize(); } public Iterator<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> getPerguntasQuestionarioCriancasDeSeisVinteTresMesesIterator() { List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasDeSeisVinteTresMesesIterator().forEachRemaining((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasDeSeisVinteTresMeses (t)); }); return listaPerguntaQuestionario.iterator(); } public void addToPerguntasQuestionarioCriancasDeSeisVinteTresMeses(PerguntaQuestionarioCriancasDeSeisVinteTresMeses elem) { instance.addToPerguntasQuestionarioCriancasDeSeisVinteTresMeses(elem.getInstance()); } public 
List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> getPerguntasQuestionarioCriancasDeSeisVinteTresMeses() { List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasDeSeisVinteTresMeses().forEach((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasDeSeisVinteTresMeses(t)); }); return listaPerguntaQuestionario; } public void setPerguntasQuestionarioCriancasDeSeisVinteTresMeses(List<PerguntaQuestionarioCriancasDeSeisVinteTresMeses> perguntasQuestionarioCriancasDeSeisVinteTresMeses) { List<PerguntaQuestionarioCriancasDeSeisVinteTresMesesThrift>listaQuestionario = new LinkedList<>(); perguntasQuestionarioCriancasDeSeisVinteTresMeses.forEach((elem)->{ listaQuestionario.add(elem.getInstance()); }); instance.setPerguntasQuestionarioCriancasDeSeisVinteTresMeses(listaQuestionario); } public void unsetPerguntasQuestionarioCriancasDeSeisVinteTresMeses() { instance.unsetPerguntasQuestionarioCriancasDeSeisVinteTresMeses(); } public boolean isSetPerguntasQuestionarioCriancasDeSeisVinteTresMeses() { return instance.isSetPerguntasQuestionarioCriancasDeSeisVinteTresMeses(); } public void setPerguntasQuestionarioCriancasDeSeisVinteTresMesesIsSet(boolean value) { instance.setPerguntasQuestionarioCriancasDeSeisVinteTresMesesIsSet(value); } public int getPerguntasQuestionarioCriancasComMaisDoisAnosSize() { return instance.getPerguntasQuestionarioCriancasComMaisDoisAnosSize(); } public Iterator<PerguntaQuestionarioCriancasComMaisDoisAnos> getPerguntasQuestionarioCriancasComMaisDoisAnosIterator() { List<PerguntaQuestionarioCriancasComMaisDoisAnos> listaPerguntaQuestionario = new LinkedList<>(); instance.getPerguntasQuestionarioCriancasComMaisDoisAnosIterator().forEachRemaining((t)->{ listaPerguntaQuestionario.add(new PerguntaQuestionarioCriancasComMaisDoisAnos(t)); }); return listaPerguntaQuestionario.iterator(); } public void 
addToPerguntasQuestionarioCriancasComMaisDoisAnos(PerguntaQuestionarioCriancasComMaisDoisAnos elem) { instance.addToPerguntasQuestionarioCriancasComMaisDoisAnos(elem.getInstance()); } public List<PerguntaQuestionarioCriancasComMaisDoisAnosThrift> getPerguntasQuestionarioCriancasComMaisDoisAnos() { return instance.getPerguntasQuestionarioCriancasComMaisDoisAnos(); } public void setPerguntasQuestionarioCriancasComMaisDoisAnos(List<PerguntaQuestionarioCriancasComMaisDoisAnosThrift> perguntasQuestionarioCriancasComMaisDoisAnos) { instance.setPerguntasQuestionarioCriancasComMaisDoisAnos(perguntasQuestionarioCriancasComMaisDoisAnos); } public void unsetPerguntasQuestionarioCriancasComMaisDoisAnos() { instance.unsetPerguntasQuestionarioCriancasComMaisDoisAnos(); } public boolean isSetPerguntasQuestionarioCriancasComMaisDoisAnos() { return instance.isSetPerguntasQuestionarioCriancasComMaisDoisAnos(); } public void setPerguntasQuestionarioCriancasComMaisDoisAnosIsSet(boolean value) { instance.setPerguntasQuestionarioCriancasComMaisDoisAnosIsSet(value); } public String getUuidFicha() { return instance.getUuidFicha(); } public void setUuidFicha(String uuidFicha) { instance.setUuidFicha(uuidFicha); } public void unsetUuidFicha() { instance.unsetUuidFicha(); } public boolean isSetUuidFicha() { return instance.isSetUuidFicha(); } public void setUuidFichaIsSet(boolean value) { instance.setUuidFichaIsSet(value); } public int getTpCdsOrigem() { return instance.getTpCdsOrigem(); } public void setTpCdsOrigem(int tpCdsOrigem) { instance.setTpCdsOrigem(tpCdsOrigem); } public void unsetTpCdsOrigem() { instance.unsetTpCdsOrigem(); } public boolean isSetTpCdsOrigem() { return instance.isSetTpCdsOrigem(); } public void setTpCdsOrigemIsSet(boolean value) { instance.setTpCdsOrigemIsSet(value); } public boolean equals(FichaConsumoAlimentar that) { return instance.equals(that.instance); } public int compareTo(FichaConsumoAlimentar other) { return instance.compareTo(other.instance); } }
Modificações nos arquivos java Modificações em FichaConsumoAlimentar.java, PerguntaQuestionarioCriancasMenoresSeisMeses.java, PerguntaQuestionarioCriancasComMaisDoisAnos.java, PerguntaQuestionarioCriancasDeSeisVinteTresMeses.java encapsulamento
src/main/java/com/github/Eddyosos/integracao20171/esus/cds/consumoalimentar/FichaConsumoAlimentar.java
Modificações nos arquivos java
Java
mit
a5541f0b471c4ebf8d112345bef76b13ebdb6e54
0
fredyw/leetcode,fredyw/leetcode,fredyw/leetcode,fredyw/leetcode
package leetcode; import java.util.HashMap; import java.util.Map; /** * https://leetcode.com/problems/guess-number-higher-or-lower-ii/ */ public class Problem375 { public int getMoneyAmount(int n) { return getMoneyAmount(n, 1, n, new HashMap<>()); } private static int getMoneyAmount(int n, int start, int end, Map<String, Integer> memo) { if (start >= end) { return 0; } String key = start + "|" + end; if (memo.containsKey(key)) { return memo.get(key); } int min = Integer.MAX_VALUE; for (int i = start; i <= end; i++) { int max = Math.max(getMoneyAmount(n, start, i - 1, memo), getMoneyAmount(n, i + 1, end, memo)) + i; min = Math.min(min, max); } memo.put(key, min); return min; } }
src/main/java/leetcode/Problem375.java
package leetcode; /** * https://leetcode.com/problems/guess-number-higher-or-lower-ii/ */ public class Problem375 { public int getMoneyAmount(int n) { // TODO return 0; } public static void main(String[] args) { Problem375 prob = new Problem375(); System.out.println(prob.getMoneyAmount(10)); // 16 System.out.println(prob.getMoneyAmount(15)); // 30 System.out.println(prob.getMoneyAmount(5)); // 6 } }
Solve problem 375
src/main/java/leetcode/Problem375.java
Solve problem 375
Java
mit
7c327a9662aed2df70259dc5e907b770ce65214f
0
curiosone-bot/curiosone-core
package com.github.bot.curiosone.core.knowledge; import java.util.List; import java.util.Objects; /** * Handles interrogations to the SemanticNetwork. * Provides methods to create and manage an interrogation. */ public class SemanticQuery { /** * Stores the SemanticRelationType of this interrogation. */ private SemanticRelationType relation; /** * Stores the subject of this interrogaiton. */ private String subject; /** * Stores the object of this interrogaiton. */ private String object; /** * Lists the adjectives of the object of this SemanticQuery. */ private List<String> objAdjectives; /** * Stores the verb of this verb. */ private String verb; /** * Constructs this SemanticQuery. * @param relation the SemanticRelationType of this SemanticQuery * @param subject the subject of this SemanticQuery * @param object the object of this SemanticQuery * @param adjectives the List of the adjectives of this SemanticQuery * @param verb the verb of this SemanticQuery */ public SemanticQuery(SemanticRelationType relation, String subject,String object, List<String> adjectives, String verb) { this.relation = relation; this.subject = subject; this.object = object; this.objAdjectives = adjectives; this.verb = verb; } /** * Constructs this SemanticQuery. * @param relation the SemanticRelationType of this SemanticQuery * @param subject the subject of this SemanticQuery * @param object the object of this SemanticQuery * @param verb the verb of this SemanticQuery */ public SemanticQuery(SemanticRelationType relation,String subject, String object, String verb) { this(relation, subject, object, null, verb); } /** * Constructs this SemanticQuery. 
* @param relation the SemanticRelationType of this SemanticQuery * @param object the object of this SemanticQuery * @param adjectives the List of the adjectives of this SemanticQuery * @param verb the verb of this SemanticQuery */ public SemanticQuery(SemanticRelationType relation, String object, List<String> adjectives, String verb) { this(relation, null, object, adjectives, verb); } /** * Constructs this SemanticQuery. * @param relation the SemanticRelationType of this SemanticQuery * @param object the object of this SemanticQuery * @param verb the verb of this SemanticQuery */ public SemanticQuery(SemanticRelationType relation, String object, String verb) { this(relation, null, object, null, verb); } /** * Returns the object of this SemanticQuery. */ public String getObject() { return object; } /** * Returns the subject of this SemanticQuery. */ public String getSubject() { return subject; } /** * Returns a List containing all the adjectives of this SemanticQuery. */ public List<String> getAdjectives() { return objAdjectives; } /** * Returns the relation of this SemanticQuery. */ public SemanticRelationType getRelation() { return relation; } /** * Returns the verb of this SemanticQuery. */ public String getVerb() { return verb; } /** * Checks whether this SemanticQuery equals to the given Object. * @param other the other object to be compared against * @return {@code true} if this SemanticQuery equals to the given object; * {@code false} otherwise */ @Override public boolean equals(Object other) { if (other == this) { return true; } if (other == null || other.getClass() != this.getClass()) { return false; } SemanticQuery that = (SemanticQuery) other; return this.relation.equals(that.relation) && this.subject.equals(that.subject) && this.object.equals(that.object) && this.verb.equals(that.verb); } /** * Returns the HashCode of this SemanticQuery. * The HashCode depends on the relation, subject, object and verb of this SemanticQuery. 
*/ @Override public int hashCode() { return Objects.hash(relation, subject, object, verb); } }
src/main/java/com/github/bot/curiosone/core/knowledge/SemanticQuery.java
package com.github.bot.curiosone.core.knowledge; import java.util.List; import java.util.Objects; /** * Resumes a semantic query parameters. * */ public class SemanticQuery { private SemanticRelationType relation; private String subject; private String object; private List<String> objAdjectives; private String verb; /** * Constructor. * @param relation {@link Relation} * @param subject Affirmation subject. * @param object Question/affirmation object * @param adjectives Object adjectives * @param verb Sentence verb. */ public SemanticQuery(SemanticRelationType relation, String subject,String object, List<String> adjectives, String verb) { this.relation = relation; this.subject = subject; this.object = object; this.objAdjectives = adjectives; this.verb = verb; } public SemanticQuery(SemanticRelationType relation,String subject, String object, String verb) { this(relation, subject, object, null, verb); } public SemanticQuery(SemanticRelationType relation, String object, List<String> adjectives, String verb) { this(relation, null, object, adjectives, verb); } public SemanticQuery(SemanticRelationType relation, String object, String verb) { this(relation, null, object, null, verb); } public String getObject() { return object; } public String getSubject() { return subject; } public List<String> getAdjectives() { return objAdjectives; } public SemanticRelationType getRelation() { return relation; } public String getVerb() { return verb; } @Override public boolean equals(Object other) { if (other == this) { return true; } if (other == null || other.getClass() != this.getClass()) { return false; } SemanticQuery that = (SemanticQuery) other; return this.relation.equals(that.relation) && this.subject.equals(that.subject) && this.object.equals(that.object) && this.verb.equals(that.verb); } @Override public int hashCode() { return Objects.hash(relation, subject, object, verb); } }
add SemanticQuery javadoc
src/main/java/com/github/bot/curiosone/core/knowledge/SemanticQuery.java
add SemanticQuery javadoc
Java
agpl-3.0
d556330e26f9817ad920510469b547220193224f
0
openEHR/adl2-core,bjornna/adl2-core,lonangel/adl2-core
/* * ADL2-core * Copyright (c) 2013-2014 Marand d.o.o. (www.marand.com) * * This file is part of ADL2-core. * * ADL2-core is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.openehr.adl.serializer; import com.google.common.base.CaseFormat; import com.google.common.collect.ImmutableSet; import org.openehr.jaxb.am.*; import org.openehr.jaxb.rm.*; import javax.annotation.Nonnull; import java.beans.BeanInfo; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.util.ArrayList; import java.util.List; import java.util.Set; /** * @author Marko Pipan */ public class DAdlSerializer { private final AdlStringBuilder builder; public DAdlSerializer(AdlStringBuilder builder) { this.builder = builder; } private static String getAttributeForField(@Nonnull String fieldName) { return CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, fieldName); } public void serialize(Object obj) { builder.append("<"); serializePlain(obj); builder.append(">"); } public void serializePlain(Object obj) { if (obj == null) { } else if (obj instanceof String) { builder.text((String) obj); } else if (obj instanceof List) { serializeListMap((List) obj); } else if (obj instanceof CodePhrase) { serializeCodePhrase((CodePhrase) obj); } else { serializeBean(obj); } } public void serializeBean(Object obj) { serializeBean(obj, ImmutableSet.<String>of()); } /** * Serializes bean, without 
wrapping it with &lt;/&gt; * * @param obj bean to serialize * @param attributesToIgnore attributes that are not written to dadl */ public void serializeBean(Object obj, Set<String> attributesToIgnore) { builder.newIndentedline(); try { BeanInfo info = Introspector.getBeanInfo(obj.getClass()); List<NameValue> values = new ArrayList<>(); for (PropertyDescriptor pd : info.getPropertyDescriptors()) { if (pd.getName().equals("class")) continue; Object value = pd.getReadMethod().invoke(obj); if (value == null) continue; if (value instanceof List && ((List) value).isEmpty()) continue; String attribute = getAttributeForField(pd.getName()); if (!attributesToIgnore.contains(attribute)) { values.add(new NameValue(attribute, value, value instanceof List && isPlainType(((List) value).get(0)))); } } for (int i = 0; i < values.size(); i++) { NameValue value = values.get(i); builder.append(value.name).append(" = "); if (value.plain) { serializePlain(value.value); } else { serialize(value.value); } if (i < values.size() - 1) { builder.newline(); } } } catch (Exception e) { throw new RuntimeException(e); } builder.unindent().newline(); } private void serializeCodePhrase(CodePhrase cp) { builder.append("[").append(cp.getTerminologyId().getValue()).append("::").append(cp.getCodeString()).append("]"); } private void serializeListMap(List list) { if (!list.isEmpty()) { Object o = list.get(0); if (isPlainType(o)) { builder.append("<"); for (int i = 0; i < list.size(); i++) { Object item = list.get(i); serializePlain(item); if (i < list.size() - 1) { builder.append(", "); } } builder.append(">"); return; } } builder.newIndentedline(); for (int i = 0; i < list.size(); i++) { Object item = list.get(i); serializeItem(item); if (i < list.size() - 1) { builder.newline(); } } builder.unindent().newline(); } private boolean isPlainType(Object o) { return o instanceof String || o instanceof CodePhrase; } private void serializeItem(Object item) { // System.out.println("serializeItem:" + item); if 
(item instanceof TranslationDetails) { TranslationDetails td = (TranslationDetails) item; serializeKey(td.getLanguage().getCodeString()); serialize(item); } else if (item instanceof StringDictionaryItem) { StringDictionaryItem sdi = (StringDictionaryItem) item; serializeKey(sdi.getId()); serialize(sdi.getValue()); } else if (item instanceof ResourceDescriptionItem) { ResourceDescriptionItem rdi = (ResourceDescriptionItem) item; serializeKey(rdi.getLanguage().getCodeString()); serialize(rdi); } else if (item instanceof CodeDefinitionSet) { CodeDefinitionSet cds = (CodeDefinitionSet) item; serializeKey(cds.getLanguage()); // builder.append("<").newIndentedline().append("items = "); serialize(cds.getItems()); // builder.unindent().newline().append(">"); } else if (item instanceof ArchetypeTerm) { ArchetypeTerm at = (ArchetypeTerm) item; serializeKey(at.getCode()); List<StringDictionaryItem> items = at.getItems(); builder.append("<").indent(); for (StringDictionaryItem s : items) { builder.newline().append(s.getId()).append(" = ").dadl(s.getValue()); } builder.unindent().newline().append(">"); } else if (item instanceof ValueSetItem) { ValueSetItem vsi = (ValueSetItem) item; serializeKey(vsi.getId()); serialize(item); } else if (item instanceof ResourceAnnotationNodes) { ResourceAnnotationNodes vsi = (ResourceAnnotationNodes) item; serializeKey(vsi.getLanguage()); builder.append("<"); serializeBean(vsi, ImmutableSet.of("language")); builder.append(">"); } else if (item instanceof ResourceAnnotationNodeItems) { ResourceAnnotationNodeItems vsi = (ResourceAnnotationNodeItems) item; serializeKey(vsi.getPath()); builder.append("<"); serializeBean(vsi, ImmutableSet.of("path")); builder.append(">"); } else if (item instanceof String) { serializePlain(item); } else if (item instanceof ConstraintBindingSet) { ConstraintBindingSet set = (ConstraintBindingSet) item; serializeKey(set.getTerminology()); builder.append("<"); // builder.append("<").newIndentedline().append("items = 
<"); serializeListMap(set.getItems()); // builder.append(">").unindent(); builder.newline().append(">"); } else if (item instanceof ConstraintBindingItem) { ConstraintBindingItem c = (ConstraintBindingItem) item; serializeKey(c.getCode()); builder.append("<").append(c.getValue()).append(">"); } else if (item instanceof TermBindingSet) { TermBindingSet t = (TermBindingSet) item; serializeKey(t.getTerminology()); builder.append("<"); // builder.append("<").newIndentedline().append("items = <"); serializeListMap(t.getItems()); // builder.append(">").unindent(); builder.newline().append(">"); } else if (item instanceof TermBindingItem) { TermBindingItem t = (TermBindingItem) item; serializeKey(t.getCode()); serialize(t.getValue()); } else { throw new IllegalArgumentException(item.getClass().getName()); } } private void serializeKey(String key) { builder.append("[").text(key).append("] = "); } private static class NameValue { final String name; final Object value; final boolean plain; private NameValue(String name, Object value, boolean plain) { this.name = name; this.value = value; this.plain = plain; } } }
adl-parser/src/main/java/org/openehr/adl/serializer/DAdlSerializer.java
/* * ADL2-core * Copyright (c) 2013-2014 Marand d.o.o. (www.marand.com) * * This file is part of ADL2-core. * * ADL2-core is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.openehr.adl.serializer; import com.google.common.base.CaseFormat; import com.google.common.collect.ImmutableSet; import org.openehr.jaxb.am.*; import org.openehr.jaxb.rm.*; import javax.annotation.Nonnull; import java.beans.BeanInfo; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.util.ArrayList; import java.util.List; import java.util.Set; /** * @author Marko Pipan */ public class DAdlSerializer { private final AdlStringBuilder builder; public DAdlSerializer(AdlStringBuilder builder) { this.builder = builder; } private static String getAttributeForField(@Nonnull String fieldName) { return CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, fieldName); } public void serialize(Object obj) { builder.append("<"); serializePlain(obj); builder.append(">"); } public void serializePlain(Object obj) { if (obj == null) { } else if (obj instanceof String) { builder.text((String) obj); } else if (obj instanceof List) { serializeListMap((List) obj); } else if (obj instanceof CodePhrase) { serializeCodePhrase((CodePhrase) obj); } else { serializeBean(obj); } } public void serializeBean(Object obj) { serializeBean(obj, ImmutableSet.<String>of()); } /** * Serializes bean, without 
wrapping it with &lt;/&gt; * * @param obj bean to serialize * @param attributesToIgnore attributes that are not written to dadl */ public void serializeBean(Object obj, Set<String> attributesToIgnore) { builder.newIndentedline(); try { BeanInfo info = Introspector.getBeanInfo(obj.getClass()); List<NameValue> values = new ArrayList<>(); for (PropertyDescriptor pd : info.getPropertyDescriptors()) { if (pd.getName().equals("class")) continue; Object value = pd.getReadMethod().invoke(obj); if (value == null) continue; if (value instanceof List && ((List) value).isEmpty()) continue; String attribute = getAttributeForField(pd.getName()); if (!attributesToIgnore.contains(attribute)) { values.add(new NameValue(attribute, value, value instanceof List && isPlainType(((List) value).get(0)))); } } for (int i = 0; i < values.size(); i++) { NameValue value = values.get(i); builder.append(value.name).append(" = "); if (value.plain) { serializePlain(value.value); } else { serialize(value.value); } if (i < values.size() - 1) { builder.newline(); } } } catch (Exception e) { throw new RuntimeException(e); } builder.unindent().newline(); } private void serializeCodePhrase(CodePhrase cp) { builder.append("[").append(cp.getTerminologyId().getValue()).append("::").append(cp.getCodeString()).append("]"); } private void serializeListMap(List list) { if (!list.isEmpty()) { Object o = list.get(0); if (isPlainType(o)) { builder.append("<"); for (int i = 0; i < list.size(); i++) { Object item = list.get(i); serializePlain(item); if (i < list.size() - 1) { builder.append(", "); } } builder.append(">"); return; } } builder.newIndentedline(); for (int i = 0; i < list.size(); i++) { Object item = list.get(i); serializeItem(item); if (i < list.size() - 1) { builder.newline(); } } builder.unindent().newline(); } private boolean isPlainType(Object o) { return o instanceof String || o instanceof CodePhrase; } private void serializeItem(Object item) { // System.out.println("serializeItem:" + item); if 
(item instanceof TranslationDetails) { TranslationDetails td = (TranslationDetails) item; serializeKey(td.getLanguage().getCodeString()); serialize(item); } else if (item instanceof StringDictionaryItem) { StringDictionaryItem sdi = (StringDictionaryItem) item; serializeKey(sdi.getId()); serialize(sdi.getValue()); } else if (item instanceof ResourceDescriptionItem) { ResourceDescriptionItem rdi = (ResourceDescriptionItem) item; serializeKey(rdi.getLanguage().getCodeString()); serialize(rdi); } else if (item instanceof CodeDefinitionSet) { CodeDefinitionSet cds = (CodeDefinitionSet) item; serializeKey(cds.getLanguage()); builder.append("<").newIndentedline().append("items = "); serialize(cds.getItems()); builder.unindent().newline().append(">"); //builder.newline().append(">"); } else if (item instanceof ArchetypeTerm) { ArchetypeTerm at = (ArchetypeTerm) item; serializeKey(at.getCode()); List<StringDictionaryItem> items = at.getItems(); builder.append("<").indent(); for (StringDictionaryItem s : items) { builder.newline().append(s.getId()).append(" = ").dadl(s.getValue()); } builder.unindent().newline().append(">"); } else if (item instanceof ValueSetItem) { ValueSetItem vsi = (ValueSetItem) item; serializeKey(vsi.getId()); serialize(item); } else if (item instanceof ResourceAnnotationNodes) { ResourceAnnotationNodes vsi = (ResourceAnnotationNodes) item; serializeKey(vsi.getLanguage()); builder.append("<"); serializeBean(vsi, ImmutableSet.of("language")); builder.append(">"); } else if (item instanceof ResourceAnnotationNodeItems) { ResourceAnnotationNodeItems vsi = (ResourceAnnotationNodeItems) item; serializeKey(vsi.getPath()); builder.append("<"); serializeBean(vsi, ImmutableSet.of("path")); builder.append(">"); } else if (item instanceof String) { serializePlain(item); } else if (item instanceof ConstraintBindingSet) { ConstraintBindingSet set = (ConstraintBindingSet) item; serializeKey(set.getTerminology()); builder.append("<").newIndentedline().append("items 
= <"); serializeListMap(set.getItems()); builder.append(">"); builder.unindent().newline().append(">"); } else if (item instanceof ConstraintBindingItem) { ConstraintBindingItem c = (ConstraintBindingItem) item; serializeKey(c.getCode()); //serializeItem(c.getValue()); builder.append("<").append(c.getValue()).append(">"); } else if (item instanceof TermBindingSet) { TermBindingSet t = (TermBindingSet) item; serializeKey(t.getTerminology()); builder.append("<").newIndentedline().append("items = <"); serializeListMap(t.getItems()); builder.append(">"); builder.unindent().newline().append(">"); } else if (item instanceof TermBindingItem) { TermBindingItem t = (TermBindingItem) item; serializeKey(t.getCode()); serialize(t.getValue()); } else { throw new IllegalArgumentException(item.getClass().getName()); } } private void serializeKey(String key) { builder.append("[").text(key).append("] = "); } private static class NameValue { final String name; final Object value; final boolean plain; private NameValue(String name, Object value, boolean plain) { this.name = name; this.value = value; this.plain = plain; } } }
Remove items from ConstraintBindingSet and TermBindingSet
adl-parser/src/main/java/org/openehr/adl/serializer/DAdlSerializer.java
Remove items from ConstraintBindingSet and TermBindingSet
Java
agpl-3.0
0d59856df29fa2c2b09e1ce11a101829722cd04d
0
genepi/imputationserver
package genepi.imputationserver.steps.qc; import genepi.hadoop.CacheStore; import genepi.hadoop.HdfsUtil; import genepi.hadoop.ParameterStore; import genepi.hadoop.PreferenceStore; import genepi.hadoop.io.HdfsLineWriter; import genepi.imputationserver.steps.vcf.VcfChunk; import genepi.imputationserver.util.ChiSquareObject; import genepi.imputationserver.util.GenomicTools; import genepi.io.FileUtil; import genepi.io.legend.LegendEntry; import genepi.io.legend.LegendFileReader; import genepi.io.text.LineReader; import java.io.File; import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.vcf.VCFCodec; import org.broadinstitute.variant.vcf.VCFFileReader; import org.broadinstitute.variant.vcf.VCFHeaderVersion; public class QualityControlMapper extends Mapper<LongWritable, Text, Text, Text> { private static final double CALL_RATE = 0.5; private static final int MIN_SNPS = 3; private static final double OVERLAP = 0.5; private String folder; private LegendFileReader legendReader; private String oldChromosome = ""; private String legendPattern; private String legendFile; private String population; private String output; private String outputRemovedSnps; private int lastPos = 0; private int phasingWindow; protected void setup(Context context) throws IOException, InterruptedException { // read parameters ParameterStore parameters = new ParameterStore(context); legendPattern = parameters.get(QualityControlJob.LEGEND_PATTERN); population = parameters.get(QualityControlJob.LEGEND_POPULATION); output = parameters.get(QualityControlJob.OUTPUT_MAF); outputRemovedSnps = parameters .get(QualityControlJob.OUTPUT_REMOVED_SNPS); String hdfsPath = parameters.get(QualityControlJob.LEGEND_HDFS); String legendFilename = FileUtil.getFilename(hdfsPath); // load files from cache CacheStore cache = new 
CacheStore(context.getConfiguration()); legendFile = cache.getArchive(legendFilename); // create temp directory PreferenceStore store = new PreferenceStore(context.getConfiguration()); folder = store.getString("minimac.tmp"); folder = FileUtil.path(folder, context.getTaskAttemptID().toString()); FileUtil.createDirectory(folder); phasingWindow = Integer.parseInt(store.getString("phasing.window")); } @Override protected void cleanup(Context context) throws IOException, InterruptedException { // delete temp directory FileUtil.deleteDirectory(folder); } public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { if (value.toString() == null || value.toString().isEmpty()) { return; } VcfChunk chunk = new VcfChunk(value.toString()); String vcfFilename = FileUtil.path(folder, "minimac.vcf.gz"); String vcfFilenameIndex = FileUtil.path(folder, "minimac.vcf.gz.tbi"); HdfsUtil.get(chunk.getVcfFilename(), vcfFilename); HdfsUtil.get(chunk.getIndexFilename(), vcfFilenameIndex); // int errors = 0; HdfsLineWriter statisticWriter = new HdfsLineWriter(HdfsUtil.path( output, chunk.toString())); HdfsLineWriter logWriter = new HdfsLineWriter(HdfsUtil.path( outputRemovedSnps, chunk.toString())); HdfsLineWriter chunkWriter = new HdfsLineWriter(HdfsUtil.path( outputRemovedSnps, "exlcude" + chunk.toString())); String hdfsFilename = chunk.getVcfFilename() + "_" + chunk.getId(); HdfsLineWriter newFileWriter = new HdfsLineWriter(hdfsFilename); // +/- phasingWindow (1 Mbases default) int start = chunk.getStart() - phasingWindow; if (start < 1) { start = 1; } int end = chunk.getEnd() + phasingWindow; LineReader reader = new LineReader(vcfFilename); VCFFileReader vcfReader = new VCFFileReader(new File(vcfFilename)); VCFCodec codec = new VCFCodec(); codec.setVCFHeader(vcfReader.getFileHeader(), VCFHeaderVersion.VCF4_1); int notFoundInLegend = 0; int foundInLegend = 0; int alleleMismatch = 0; int alleleSwitch = 0; int strandSwitch1 = 0; int 
strandSwitch2 = 0; int strandSwitch3 = 0; int match = 0; int lowCallRate = 0; int filtered = 0; int overallSnps = 0; int validSnps = 0; int monomorphic = 0; int alternativeAlleles = 0; int noSnps = 0; int duplicates = 0; int filterFlag = 0; int invalidAlleles = 0; int removedChunksSnps = 0; int removedChunksOverlap = 0; int removedChunksCallRate = 0; int[] snpsPerSampleCount = null; while (reader.next()) { String line = reader.get(); if (line.startsWith("#")) { newFileWriter.write(line); } else { String tiles[] = line.split("\t", 6); int position = Integer.parseInt(tiles[1]); String ref = tiles[3]; String alt = tiles[4]; boolean insideChunk = position >= chunk.getStart() && position <= chunk.getEnd(); // filter invalid alleles if (!GenomicTools.isValid(ref) || !GenomicTools.isValid(alt)) { if (insideChunk) { logWriter.write("Invalid Alleles: " + tiles[0] + " (" + ref + "/" + alt + ")"); invalidAlleles++; filtered++; } continue; } VariantContext snp = codec.decode(line); // count duplicates if ((lastPos == snp.getStart() && lastPos > 0)) { if (insideChunk) { duplicates++; logWriter.write("FILTER - Duplicate: " + snp.getID() + " - pos: " + snp.getStart()); //logWriter.write("COPY OF: " + tmp); filtered++; } lastPos = snp.getStart(); continue; } String tmp = "FILTER - Duplicate: " + snp.getID() + " - pos: " + snp.getStart(); // update last pos only when not filtered if (!snp.isFiltered()) { lastPos = snp.getStart(); } if (snpsPerSampleCount == null) { snpsPerSampleCount = new int[snp.getNSamples()]; for (int i = 0; i < snp.getNSamples(); i++) { snpsPerSampleCount[i] = 0; } } // filter flag if (snp.isFiltered()) { if (insideChunk) { if (snp.getFilters().contains("DUP")) { duplicates++; logWriter.write("FILTER - Duplicate " + snp.getID() + " - pos: " + snp.getStart()); filtered++; } else { logWriter .write("FILTER - Flag is set: " + snp.getID() + " - pos: " + snp.getStart()); filterFlag++; filtered++; } } continue; } // alternative allele frequency int hetVarOnes = 
snp.getHetCount(); int homVarOnes = snp.getHomVarCount() * 2; double af = (double) ((hetVarOnes + homVarOnes) / (double) (((snp .getNSamples() - snp.getNoCallCount()) * 2))); if (af > 0.5) { if (insideChunk) { alternativeAlleles++; } } // filter indels if (snp.isIndel() || snp.isComplexIndel()) { if (insideChunk) { logWriter.write("FILTER - InDel: " + snp.getID() + " - pos: " + snp.getStart()); noSnps++; filtered++; } continue; } // remove monomorphic snps // monomorphic only exclude 0/0; if (snp.isMonomorphicInSamples() || snp.getHetCount() == 2 * (snp.getNSamples() - snp .getNoCallCount())) { if (insideChunk) { // System.out.println(snp.getChr()+":"+snp.getStart()); logWriter.write("FILTER - Monomorphic: " + snp.getID() + " - pos: " + snp.getStart()); monomorphic++; filtered++; } continue; } LegendEntry refSnp = getReader(snp.getChr()).findByPosition2( snp.getStart()); // not found in legend file, don't write to file (Talked to Chr) if (refSnp == null) { if (insideChunk) { overallSnps++; notFoundInLegend++; int i = 0; for (String sample : snp.getSampleNamesOrderedByName()) { if (snp.getGenotype(sample).isCalled()) { snpsPerSampleCount[i] += 1; } i++; } } continue; } else { if (insideChunk) { foundInLegend++; } char legendRef = refSnp.getAlleleA(); char legendAlt = refSnp.getAlleleB(); char studyRef = snp.getReference().getBaseString() .charAt(0); char studyAlt = snp.getAltAlleleWithHighestAlleleCount() .getBaseString().charAt(0); /** simple match of ref/alt in study and legend file **/ if (GenomicTools.match(snp, refSnp)) { if (insideChunk) { match++; } } /** count A/T C/G genotypes **/ else if (GenomicTools.complicatedGenotypes(snp, refSnp)) { if (insideChunk) { strandSwitch2++; } } /** simple allele switch check; ignore A/T C/G from above **/ else if (GenomicTools.alleleSwitch(snp, refSnp)) { if (insideChunk) { alleleSwitch++; logWriter.write("INFO - Allele switch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", 
data: " + studyRef + "/" + studyAlt + ")"); } } /** simple strand swaps **/ else if (GenomicTools.strandSwap(studyRef, studyAlt, legendRef, legendAlt)) { if (insideChunk) { strandSwitch1++; filtered++; logWriter.write("FILTER - Strand switch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", data: " + studyRef + "/" + studyAlt + ")"); } continue; } else if (GenomicTools.strandSwapAndAlleleSwitch(studyRef, studyAlt, legendRef, legendAlt)) { if (insideChunk) { filtered++; strandSwitch3++; logWriter .write("FILTER - Strand switch and Allele switch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", data: " + studyRef + "/" + studyAlt + ")"); } continue; } // filter allele mismatches else if (GenomicTools.alleleMismatch(studyRef, studyAlt, legendRef, legendAlt)) { if (insideChunk) { logWriter.write("FILTER - Allele mismatch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", data: " + studyRef + "/" + studyAlt + ")"); alleleMismatch++; filtered++; } continue; } // filter low call rate if (snp.getNoCallCount() / (double) snp.getNSamples() > 0.10) { if (insideChunk) { logWriter .write("FILTER - Low call rate: " + snp.getID() + " - pos: " + snp.getStart() + " (" + (1.0 - snp.getNoCallCount() / (double) snp .getNSamples()) + ")"); lowCallRate++; filtered++; } continue; } // allele-frequency check if (insideChunk) { if (!population.equals("mixed")) { SnpStats statistics; if (GenomicTools.strandSwapAndAlleleSwitch( studyRef, studyAlt, legendRef, legendAlt) || GenomicTools.alleleSwitch(snp, refSnp)) { // swap alleles statistics = calculateAlleleFreq(snp, refSnp, true); } else { statistics = calculateAlleleFreq(snp, refSnp, false); } statisticWriter.write(snp.getID() + "\t" + statistics.toString()); } overallSnps++; } // write only SNPs into minimac file // which came up to this point if (position >= start && position <= end) { 
newFileWriter.write(line); validSnps++; // check if all samples have // enough SNPs if (insideChunk) { int i = 0; for (String sample : snp .getSampleNamesOrderedByName()) { if (snp.getGenotype(sample).isCalled()) { snpsPerSampleCount[i] += 1; } i++; } } } } } } newFileWriter.close(); // this checks if enough SNPs are included in each sample boolean lowSampleCallRate = false; for (int i = 0; i < snpsPerSampleCount.length; i++) { int snps = snpsPerSampleCount[i]; double sampleCallRate = snps / (double) overallSnps; if (sampleCallRate < CALL_RATE) { lowSampleCallRate = true; chunkWriter.write(chunk.toString() + " Sample " + vcfReader.getFileHeader().getSampleNamesInOrder() .get(i) + ": call rate: " + sampleCallRate); } } // this checks if the amount of not found SNPs in the reference panel is // smaller than 50 %. At least 3 SNPs must be included in each chunk double overlap = foundInLegend / (double) (foundInLegend + notFoundInLegend); if (overlap >= OVERLAP && foundInLegend >= MIN_SNPS && !lowSampleCallRate && validSnps >= MIN_SNPS) { // update chunk chunk.setSnps(overallSnps); chunk.setInReference(foundInLegend); chunk.setVcfFilename(hdfsFilename); context.write(new Text(chunk.getChromosome()), new Text(chunk.serialize())); } else { chunkWriter.write(chunk.toString() + " (Snps: " + overallSnps + ", Reference overlap: " + overlap + ", low sample call rates: " + lowSampleCallRate + ")"); if (overlap < OVERLAP) { removedChunksOverlap++; } else if (foundInLegend < MIN_SNPS || validSnps < MIN_SNPS) { removedChunksSnps++; } else if (lowSampleCallRate) { removedChunksCallRate++; } } vcfReader.close(); reader.close(); legendReader.close(); statisticWriter.write(""); statisticWriter.close(); logWriter.write(""); logWriter.close(); chunkWriter.write(""); chunkWriter.close(); context.getCounter("minimac", "alternativeAlleles").increment( alternativeAlleles); context.getCounter("minimac", "monomorphic").increment(monomorphic); context.getCounter("minimac", 
"noSnps").increment(noSnps); context.getCounter("minimac", "duplicates").increment(duplicates); context.getCounter("minimac", "foundInLegend").increment(foundInLegend); context.getCounter("minimac", "notFoundInLegend").increment( notFoundInLegend); context.getCounter("minimac", "alleleMismatch").increment( alleleMismatch); context.getCounter("minimac", "strandSwitch1").increment(strandSwitch1); context.getCounter("minimac", "strandSwitch2").increment(strandSwitch2); context.getCounter("minimac", "strandSwitch3").increment(strandSwitch3); context.getCounter("minimac", "match").increment(match); context.getCounter("minimac", "alleleSwitch").increment(alleleSwitch); context.getCounter("minimac", "toLessSamples").increment(lowCallRate); context.getCounter("minimac", "filtered").increment(filtered); context.getCounter("minimac", "removedChunksCallRate").increment( removedChunksCallRate); context.getCounter("minimac", "removedChunksOverlap").increment( removedChunksOverlap); context.getCounter("minimac", "removedChunksSnps").increment( removedChunksSnps); context.getCounter("minimac", "filterFlag").increment(filterFlag); context.getCounter("minimac", "invalidAlleles").increment( invalidAlleles); context.getCounter("minimac", "remainingSnps").increment(overallSnps); // write updated value out } private SnpStats calculateAlleleFreq(VariantContext snp, LegendEntry refSnp, boolean strandSwap) throws IOException, InterruptedException { // calculate allele frequency SnpStats output = new SnpStats(); int position = snp.getStart(); ChiSquareObject chiObj = GenomicTools .chiSquare(snp, refSnp, strandSwap); char majorAllele; char minorAllele; if (!strandSwap) { majorAllele = snp.getReference().getBaseString().charAt(0); minorAllele = snp.getAltAlleleWithHighestAlleleCount() .getBaseString().charAt(0); } else { majorAllele = snp.getAltAlleleWithHighestAlleleCount() .getBaseString().charAt(0); minorAllele = snp.getReference().getBaseString().charAt(0); } output.setType("SNP"); 
output.setPosition(position); output.setChromosome(snp.getChr()); output.setRefFrequencyA(refSnp.getFrequencyA()); output.setRefFrequencyB(refSnp.getFrequencyB()); output.setFrequencyA((float) chiObj.getP()); output.setFrequencyB((float) chiObj.getQ()); output.setChisq(chiObj.getChisq()); output.setAlleleA(majorAllele); output.setAlleleB(minorAllele); output.setRefAlleleA(refSnp.getAlleleA()); output.setRefAlleleB(refSnp.getAlleleB()); output.setOverlapWithReference(true); return output; } private LegendFileReader getReader(String chromosome) throws IOException, InterruptedException { if (!oldChromosome.equals(chromosome)) { String chrFilename = legendPattern.replaceAll("\\$chr", chromosome); String myLegendFile = FileUtil.path(legendFile, chrFilename); if (!new File(myLegendFile).exists()) { throw new InterruptedException("ReferencePanel '" + myLegendFile + "' not found."); } legendReader = new LegendFileReader(myLegendFile, population); legendReader.createIndex(); legendReader.initSearch(); oldChromosome = chromosome; } return legendReader; } }
src/main/java/genepi/imputationserver/steps/qc/QualityControlMapper.java
package genepi.imputationserver.steps.qc; import genepi.hadoop.CacheStore; import genepi.hadoop.HdfsUtil; import genepi.hadoop.ParameterStore; import genepi.hadoop.PreferenceStore; import genepi.hadoop.io.HdfsLineWriter; import genepi.imputationserver.steps.vcf.VcfChunk; import genepi.imputationserver.util.ChiSquareObject; import genepi.imputationserver.util.GenomicTools; import genepi.io.FileUtil; import genepi.io.legend.LegendEntry; import genepi.io.legend.LegendFileReader; import genepi.io.text.LineReader; import java.io.File; import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.vcf.VCFCodec; import org.broadinstitute.variant.vcf.VCFFileReader; import org.broadinstitute.variant.vcf.VCFHeaderVersion; public class QualityControlMapper extends Mapper<LongWritable, Text, Text, Text> { private static final double CALL_RATE = 0.5; private static final int MIN_SNPS = 3; private static final double OVERLAP = 0.5; private String folder; private LegendFileReader legendReader; private String oldChromosome = ""; private String legendPattern; private String legendFile; private String population; private String output; private String outputRemovedSnps; private int lastPos = 0; private int phasingWindow; protected void setup(Context context) throws IOException, InterruptedException { // read parameters ParameterStore parameters = new ParameterStore(context); legendPattern = parameters.get(QualityControlJob.LEGEND_PATTERN); population = parameters.get(QualityControlJob.LEGEND_POPULATION); output = parameters.get(QualityControlJob.OUTPUT_MAF); outputRemovedSnps = parameters .get(QualityControlJob.OUTPUT_REMOVED_SNPS); String hdfsPath = parameters.get(QualityControlJob.LEGEND_HDFS); String legendFilename = FileUtil.getFilename(hdfsPath); // load files from cache CacheStore cache = new 
CacheStore(context.getConfiguration()); legendFile = cache.getArchive(legendFilename); // create temp directory PreferenceStore store = new PreferenceStore(context.getConfiguration()); folder = store.getString("minimac.tmp"); folder = FileUtil.path(folder, context.getTaskAttemptID().toString()); FileUtil.createDirectory(folder); phasingWindow = Integer.parseInt(store.getString("phasing.window")); } @Override protected void cleanup(Context context) throws IOException, InterruptedException { // delete temp directory FileUtil.deleteDirectory(folder); } public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { if (value.toString() == null || value.toString().isEmpty()) { return; } VcfChunk chunk = new VcfChunk(value.toString()); String vcfFilename = FileUtil.path(folder, "minimac.vcf.gz"); String vcfFilenameIndex = FileUtil.path(folder, "minimac.vcf.gz.tbi"); HdfsUtil.get(chunk.getVcfFilename(), vcfFilename); HdfsUtil.get(chunk.getIndexFilename(), vcfFilenameIndex); // int errors = 0; HdfsLineWriter statisticWriter = new HdfsLineWriter(HdfsUtil.path( output, chunk.toString())); HdfsLineWriter logWriter = new HdfsLineWriter(HdfsUtil.path( outputRemovedSnps, chunk.toString())); HdfsLineWriter chunkWriter = new HdfsLineWriter(HdfsUtil.path( outputRemovedSnps, "exlcude" + chunk.toString())); String hdfsFilename = chunk.getVcfFilename() + "_" + chunk.getId(); HdfsLineWriter newFileWriter = new HdfsLineWriter(hdfsFilename); // +/- phasingWindow (1 Mbases default) int start = chunk.getStart() - phasingWindow; if (start < 1) { start = 1; } int end = chunk.getEnd() + phasingWindow; LineReader reader = new LineReader(vcfFilename); VCFFileReader vcfReader = new VCFFileReader(new File(vcfFilename)); VCFCodec codec = new VCFCodec(); codec.setVCFHeader(vcfReader.getFileHeader(), VCFHeaderVersion.VCF4_1); int notFoundInLegend = 0; int foundInLegend = 0; int alleleMismatch = 0; int alleleSwitch = 0; int strandSwitch1 = 0; int 
strandSwitch2 = 0; int strandSwitch3 = 0; int match = 0; int lowCallRate = 0; int filtered = 0; int overallSnps = 0; int validSnps = 0; int monomorphic = 0; int alternativeAlleles = 0; int noSnps = 0; int duplicates = 0; int filterFlag = 0; int invalidAlleles = 0; int removedChunksSnps = 0; int removedChunksOverlap = 0; int removedChunksCallRate = 0; int[] snpsPerSampleCount = null; while (reader.next()) { String line = reader.get(); if (line.startsWith("#")) { newFileWriter.write(line); } else { String tiles[] = line.split("\t", 6); int position = Integer.parseInt(tiles[1]); String ref = tiles[3]; String alt = tiles[4]; boolean insideChunk = position >= chunk.getStart() && position <= chunk.getEnd(); // filter invalid alleles if (!GenomicTools.isValid(ref) || !GenomicTools.isValid(alt)) { if (insideChunk) { logWriter.write("Invalid Alleles: " + tiles[0] + " (" + ref + "/" + alt + ")"); invalidAlleles++; filtered++; } continue; } VariantContext snp = codec.decode(line); // count duplicates if ((lastPos == snp.getStart() && lastPos > 0)) { if (insideChunk) { duplicates++; logWriter.write("FILTER - Duplicate: " + snp.getID() + " - pos: " + snp.getStart()); //logWriter.write("COPY OF: " + tmp); filtered++; } lastPos = snp.getStart(); continue; } String tmp = "FILTER - Duplicate: " + snp.getID() + " - pos: " + snp.getStart(); // update last pos only when not filtered if (!snp.isFiltered()) { lastPos = snp.getStart(); } if (snpsPerSampleCount == null) { snpsPerSampleCount = new int[snp.getNSamples()]; for (int i = 0; i < snp.getNSamples(); i++) { snpsPerSampleCount[i] = 0; } } // filter flag if (snp.isFiltered()) { if (insideChunk) { if (snp.getFilters().contains("DUP")) { duplicates++; logWriter.write("FILTER - Duplicate " + snp.getID() + " - pos: " + snp.getStart()); filtered++; } else { logWriter .write("FILTER - Flag is set: " + snp.getID() + " - pos: " + snp.getStart()); filterFlag++; filtered++; } } continue; } // alternative allele frequency int hetVarOnes = 
snp.getHetCount(); int homVarOnes = snp.getHomVarCount() * 2; double af = (double) ((hetVarOnes + homVarOnes) / (double) (((snp .getNSamples() - snp.getNoCallCount()) * 2))); if (af > 0.5) { if (insideChunk) { alternativeAlleles++; } } // filter indels if (snp.isIndel() || snp.isComplexIndel()) { if (insideChunk) { logWriter.write("FILTER - InDel: " + snp.getID() + " - pos: " + snp.getStart()); noSnps++; filtered++; } continue; } // remove monomorphic snps // monomorphic only exclude 0/0; if (snp.isMonomorphicInSamples() || snp.getHetCount() == 2 * (snp.getNSamples() - snp .getNoCallCount())) { if (insideChunk) { // System.out.println(snp.getChr()+":"+snp.getStart()); logWriter.write("FILTER - Monomorphic: " + snp.getID() + " - pos: " + snp.getStart()); monomorphic++; filtered++; } continue; } LegendEntry refSnp = getReader(snp.getChr()).findByPosition2( snp.getStart()); // not found in legend file, don't write to file (Talked to Chr) if (refSnp == null) { if (insideChunk) { overallSnps++; notFoundInLegend++; int i = 0; for (String sample : snp.getSampleNamesOrderedByName()) { if (snp.getGenotype(sample).isCalled()) { snpsPerSampleCount[i] += 1; } i++; } } continue; } else { if (insideChunk) { foundInLegend++; } char legendRef = refSnp.getAlleleA(); char legendAlt = refSnp.getAlleleB(); char studyRef = snp.getReference().getBaseString() .charAt(0); char studyAlt = snp.getAltAlleleWithHighestAlleleCount() .getBaseString().charAt(0); /** simple match of ref/alt in study and legend file **/ if (GenomicTools.match(snp, refSnp)) { if (insideChunk) { match++; } } /** count A/T C/G genotypes **/ else if (GenomicTools.complicatedGenotypes(snp, refSnp)) { if (insideChunk) { strandSwitch2++; } } /** simple allele switch check; ignore A/T C/G from above **/ else if (GenomicTools.alleleSwitch(snp, refSnp)) { if (insideChunk) { alleleSwitch++; logWriter.write("INFO - Allele switch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", 
data: " + studyRef + "/" + studyAlt + ")"); } } /** simple strand swaps **/ else if (GenomicTools.strandSwap(studyRef, studyAlt, legendRef, legendAlt)) { if (insideChunk) { strandSwitch1++; filtered++; logWriter.write("FILTER - Strand switch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", data: " + studyRef + "/" + studyAlt + ")"); } continue; } else if (GenomicTools.strandSwapAndAlleleSwitch(studyRef, studyAlt, legendRef, legendAlt)) { if (insideChunk) { filtered++; strandSwitch3++; logWriter .write("FILTER - Strand switch and Allele switch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", data: " + studyRef + "/" + studyAlt + ")"); } continue; } // filter allele mismatches else if (GenomicTools.alleleMismatch(studyRef, studyAlt, legendRef, legendAlt)) { if (insideChunk) { logWriter.write("FILTER - Allele mismatch: " + snp.getID() + " - pos: " + snp.getStart() + " (ref: " + legendRef + "/" + legendAlt + ", data: " + studyRef + "/" + studyAlt + ")"); alleleMismatch++; filtered++; } continue; } // filter low call rate if (snp.getNoCallCount() / (double) snp.getNSamples() > 0.10) { if (insideChunk) { logWriter .write("FILTER - Low call rate: " + snp.getID() + " - pos: " + snp.getStart() + " (" + (1.0 - snp.getNoCallCount() / (double) snp .getNSamples()) + ")"); lowCallRate++; filtered++; } continue; } // allele-frequency check if (insideChunk) { if (!population.equals("mixed")) { SnpStats statistics; if (GenomicTools.strandSwapAndAlleleSwitch( studyRef, studyAlt, legendRef, legendAlt) || GenomicTools.alleleSwitch(snp, refSnp)) { // swap alleles statistics = calculateAlleleFreq(snp, refSnp, true); } else { statistics = calculateAlleleFreq(snp, refSnp, false); } statisticWriter.write(snp.getID() + "\t" + statistics.toString()); } overallSnps++; } // write only SNPs into minimac file // which came up to this point if (position >= start && position <= end) { 
newFileWriter.write(line); validSnps++; // check if all samples have // enough SNPs if (insideChunk) { int i = 0; for (String sample : snp .getSampleNamesOrderedByName()) { if (snp.getGenotype(sample).isCalled()) { snpsPerSampleCount[i] += 1; } i++; } } } } } } newFileWriter.close(); // this checks if enough SNPs are included in each sample boolean lowSampleCallRate = false; for (int i = 0; i < snpsPerSampleCount.length; i++) { int snps = snpsPerSampleCount[i]; double sampleCallRate = snps / (double) overallSnps; if (sampleCallRate < CALL_RATE) { lowSampleCallRate = true; chunkWriter.write(chunk.toString() + " Sample " + vcfReader.getFileHeader().getSampleNamesInOrder() .get(i) + ": call rate: " + sampleCallRate); } } // this checks if the amount of not found SNPs in the reference panel is // smaller than 50 %. At least 3 SNPs must be included in each chunk double overlap = foundInLegend / (double) (foundInLegend + notFoundInLegend); //if (overlap >= OVERLAP && foundInLegend >= MIN_SNPS && !lowSampleCallRate && validSnps >= MIN_SNPS) { // update chunk chunk.setSnps(overallSnps); chunk.setInReference(foundInLegend); chunk.setVcfFilename(hdfsFilename); context.write(new Text(chunk.getChromosome()), new Text(chunk.serialize())); /*} else { chunkWriter.write(chunk.toString() + " (Snps: " + overallSnps + ", Reference overlap: " + overlap + ", low sample call rates: " + lowSampleCallRate + ")"); if (overlap < OVERLAP) { removedChunksOverlap++; } else if (foundInLegend < MIN_SNPS || validSnps < MIN_SNPS) { removedChunksSnps++; } else if (lowSampleCallRate) { removedChunksCallRate++; } }*/ vcfReader.close(); reader.close(); legendReader.close(); statisticWriter.write(""); statisticWriter.close(); logWriter.write(""); logWriter.close(); chunkWriter.write(""); chunkWriter.close(); context.getCounter("minimac", "alternativeAlleles").increment( alternativeAlleles); context.getCounter("minimac", "monomorphic").increment(monomorphic); context.getCounter("minimac", 
"noSnps").increment(noSnps); context.getCounter("minimac", "duplicates").increment(duplicates); context.getCounter("minimac", "foundInLegend").increment(foundInLegend); context.getCounter("minimac", "notFoundInLegend").increment( notFoundInLegend); context.getCounter("minimac", "alleleMismatch").increment( alleleMismatch); context.getCounter("minimac", "strandSwitch1").increment(strandSwitch1); context.getCounter("minimac", "strandSwitch2").increment(strandSwitch2); context.getCounter("minimac", "strandSwitch3").increment(strandSwitch3); context.getCounter("minimac", "match").increment(match); context.getCounter("minimac", "alleleSwitch").increment(alleleSwitch); context.getCounter("minimac", "toLessSamples").increment(lowCallRate); context.getCounter("minimac", "filtered").increment(filtered); context.getCounter("minimac", "removedChunksCallRate").increment( removedChunksCallRate); context.getCounter("minimac", "removedChunksOverlap").increment( removedChunksOverlap); context.getCounter("minimac", "removedChunksSnps").increment( removedChunksSnps); context.getCounter("minimac", "filterFlag").increment(filterFlag); context.getCounter("minimac", "invalidAlleles").increment( invalidAlleles); context.getCounter("minimac", "remainingSnps").increment(overallSnps); // write updated value out } private SnpStats calculateAlleleFreq(VariantContext snp, LegendEntry refSnp, boolean strandSwap) throws IOException, InterruptedException { // calculate allele frequency SnpStats output = new SnpStats(); int position = snp.getStart(); ChiSquareObject chiObj = GenomicTools .chiSquare(snp, refSnp, strandSwap); char majorAllele; char minorAllele; if (!strandSwap) { majorAllele = snp.getReference().getBaseString().charAt(0); minorAllele = snp.getAltAlleleWithHighestAlleleCount() .getBaseString().charAt(0); } else { majorAllele = snp.getAltAlleleWithHighestAlleleCount() .getBaseString().charAt(0); minorAllele = snp.getReference().getBaseString().charAt(0); } output.setType("SNP"); 
output.setPosition(position); output.setChromosome(snp.getChr()); output.setRefFrequencyA(refSnp.getFrequencyA()); output.setRefFrequencyB(refSnp.getFrequencyB()); output.setFrequencyA((float) chiObj.getP()); output.setFrequencyB((float) chiObj.getQ()); output.setChisq(chiObj.getChisq()); output.setAlleleA(majorAllele); output.setAlleleB(minorAllele); output.setRefAlleleA(refSnp.getAlleleA()); output.setRefAlleleB(refSnp.getAlleleB()); output.setOverlapWithReference(true); return output; } private LegendFileReader getReader(String chromosome) throws IOException, InterruptedException { if (!oldChromosome.equals(chromosome)) { String chrFilename = legendPattern.replaceAll("\\$chr", chromosome); String myLegendFile = FileUtil.path(legendFile, chrFilename); if (!new File(myLegendFile).exists()) { throw new InterruptedException("ReferencePanel '" + myLegendFile + "' not found."); } legendReader = new LegendFileReader(myLegendFile, population); legendReader.createIndex(); legendReader.initSearch(); oldChromosome = chromosome; } return legendReader; } }
Updated QC mapper.
src/main/java/genepi/imputationserver/steps/qc/QualityControlMapper.java
Updated QC mapper.
Java
agpl-3.0
a298880073075b596a6f010c738d2fba3be48412
0
CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine
package com.splicemachine.derby.impl.sql.execute.operations; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.RemovalListener; import com.google.common.cache.RemovalNotification; import com.google.common.collect.Iterators; import com.splicemachine.derby.hbase.SpliceDriver; import com.splicemachine.derby.iapi.sql.execute.SpliceNoPutResultSet; import com.splicemachine.derby.iapi.sql.execute.SpliceOperation; import com.splicemachine.derby.iapi.sql.execute.SpliceOperationContext; import com.splicemachine.derby.iapi.sql.execute.SpliceRuntimeContext; import com.splicemachine.derby.iapi.storage.RowProvider; import com.splicemachine.derby.impl.store.access.hbase.HBaseRowLocation; import com.splicemachine.derby.utils.SpliceUtils; import com.splicemachine.derby.utils.marshall.KeyMarshall; import com.splicemachine.derby.utils.marshall.KeyType; import com.splicemachine.derby.utils.marshall.PairDecoder; import com.splicemachine.derby.utils.marshall.RowDecoder; import com.splicemachine.encoding.MultiFieldEncoder; import com.splicemachine.utils.SpliceLogUtils; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.services.loader.GeneratedMethod; import org.apache.derby.iapi.sql.Activation; import org.apache.derby.iapi.sql.execute.ExecRow; import org.apache.derby.iapi.sql.execute.NoPutResultSet; import org.apache.derby.iapi.store.access.Qualifier; import org.apache.derby.iapi.types.SQLInteger; import org.apache.derby.shared.common.reference.MessageId; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; import org.apache.log4j.Logger; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; public class BroadcastJoinOperation extends JoinOperation { private static final long serialVersionUID = 2l; private 
static Logger LOG = Logger.getLogger(BroadcastJoinOperation.class); protected String emptyRowFunMethodName; protected boolean wasRightOuterJoin; protected Qualifier[][] qualifierProbe; protected int leftHashKeyItem; protected int[] leftHashKeys; protected int rightHashKeyItem; protected int[] rightHashKeys; protected ExecRow rightTemplate; protected static List<NodeType> nodeTypes; protected Scan reduceScan; protected RowProvider clientProvider; protected SQLInteger rowType; protected byte[] priorHash; protected List<ExecRow> rights; protected byte[] rightHash; protected Iterator<ExecRow> rightIterator; protected BroadcastNextRowIterator broadcastIterator; protected Map<ByteBuffer, List<ExecRow>> rightSideMap; protected boolean isOuterJoin = false; protected static final Cache<Integer, Map<ByteBuffer, List<ExecRow>>> broadcastJoinCache; static { nodeTypes = new ArrayList<NodeType>(); nodeTypes.add(NodeType.MAP); nodeTypes.add(NodeType.SCROLL); broadcastJoinCache = CacheBuilder.newBuilder(). 
maximumSize(50000).expireAfterWrite(10, TimeUnit.MINUTES).removalListener(new RemovalListener<Integer, Map<ByteBuffer, List<ExecRow>>>() { @Override public void onRemoval(RemovalNotification<Integer, Map<ByteBuffer, List<ExecRow>>> notification) { SpliceLogUtils.trace(LOG, "Removing unique sequence ID %s", notification.getKey()); } }).build(); } public BroadcastJoinOperation() { super(); } public BroadcastJoinOperation(SpliceOperation leftResultSet, int leftNumCols, SpliceOperation rightResultSet, int rightNumCols, int leftHashKeyItem, int rightHashKeyItem, Activation activation, GeneratedMethod restriction, int resultSetNumber, boolean oneRowRightSide, boolean notExistsRightSide, double optimizerEstimatedRowCount, double optimizerEstimatedCost, String userSuppliedOptimizerOverrides) throws StandardException { super(leftResultSet, leftNumCols, rightResultSet, rightNumCols, activation, restriction, resultSetNumber, oneRowRightSide, notExistsRightSide, optimizerEstimatedRowCount, optimizerEstimatedCost, userSuppliedOptimizerOverrides); SpliceLogUtils.trace(LOG, "instantiate"); this.leftHashKeyItem = leftHashKeyItem; this.rightHashKeyItem = rightHashKeyItem; init(SpliceOperationContext.newContext(activation)); } @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { SpliceLogUtils.trace(LOG, "readExternal"); super.readExternal(in); leftHashKeyItem = in.readInt(); rightHashKeyItem = in.readInt(); } @Override public void writeExternal(ObjectOutput out) throws IOException { SpliceLogUtils.trace(LOG, "writeExternal"); super.writeExternal(out); out.writeInt(leftHashKeyItem); out.writeInt(rightHashKeyItem); } @Override public ExecRow nextRow(SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { SpliceLogUtils.trace(LOG, "nextRow"); if (rightSideMap == null) rightSideMap = retrieveRightSideCache(spliceRuntimeContext); while (broadcastIterator == null || !broadcastIterator.hasNext()) { if ((leftRow = 
leftResultSet.nextRow(spliceRuntimeContext)) == null) { mergedRow = null; this.setCurrentRow(mergedRow); return mergedRow; } else { broadcastIterator = new BroadcastNextRowIterator(leftRow); } } return broadcastIterator.next(); } @Override public RowProvider getReduceRowProvider(SpliceOperation top, PairDecoder decoder, SpliceRuntimeContext spliceRuntimeContext) throws StandardException { return leftResultSet.getReduceRowProvider(top, decoder, spliceRuntimeContext); } @Override public RowProvider getMapRowProvider(SpliceOperation top, PairDecoder decoder,SpliceRuntimeContext spliceRuntimeContext) throws StandardException { return leftResultSet.getMapRowProvider(top, decoder, spliceRuntimeContext); } @Override public void init(SpliceOperationContext context) throws StandardException { SpliceLogUtils.trace(LOG, "init"); super.init(context); leftHashKeys = generateHashKeys(leftHashKeyItem); rightHashKeys = generateHashKeys(rightHashKeyItem); mergedRow = activation.getExecutionFactory().getValueRow(leftNumCols + rightNumCols); rightTemplate = activation.getExecutionFactory().getValueRow(rightNumCols); rightResultSet.init(context); } @Override public NoPutResultSet executeScan(SpliceRuntimeContext runtimeContext) throws StandardException { SpliceLogUtils.trace(LOG, "executeScan"); final List<SpliceOperation> opStack = new ArrayList<SpliceOperation>(); this.generateLeftOperationStack(opStack); SpliceLogUtils.trace(LOG, "operationStack=%s", opStack); // Get the topmost value, instead of the bottommost, in case it's you SpliceOperation regionOperation = opStack.get(opStack.size() - 1); SpliceLogUtils.trace(LOG, "regionOperation=%s", opStack); RowProvider provider; PairDecoder decoder = OperationUtils.getPairDecoder(this,runtimeContext); if (regionOperation.getNodeTypes().contains(NodeType.REDUCE)) { provider = regionOperation.getReduceRowProvider(this, decoder, runtimeContext); } else { provider = regionOperation.getMapRowProvider(this, decoder, runtimeContext); } return 
new SpliceNoPutResultSet(activation, this, provider); } @Override public ExecRow getExecRowDefinition() throws StandardException { SpliceLogUtils.trace(LOG, "getExecRowDefinition"); JoinUtils.getMergedRow(((SpliceOperation) this.leftResultSet).getExecRowDefinition(), ((SpliceOperation) this.rightResultSet).getExecRowDefinition(), wasRightOuterJoin, rightNumCols, leftNumCols, mergedRow); return mergedRow; } @Override public List<NodeType> getNodeTypes() { SpliceLogUtils.trace(LOG, "getNodeTypes"); return nodeTypes; } @Override public SpliceOperation getLeftOperation() { SpliceLogUtils.trace(LOG, "getLeftOperation"); return leftResultSet; } protected class BroadcastNextRowIterator implements Iterator<ExecRow> { protected ExecRow leftRow; protected Iterator<ExecRow> rightSideIterator = null; protected KeyMarshall leftKeyEncoder = KeyType.BARE; protected MultiFieldEncoder keyEncoder = MultiFieldEncoder.create(SpliceDriver.getKryoPool(),leftNumCols); public BroadcastNextRowIterator(ExecRow leftRow) throws StandardException { this.leftRow = leftRow; keyEncoder.reset(); leftKeyEncoder.encodeKey(leftRow.getRowArray(),leftHashKeys,null,null,keyEncoder); List<ExecRow> rows = rightSideMap.get(ByteBuffer.wrap(keyEncoder.build())); if (rows != null) { if (!notExistsRightSide) { // Sorry for the double negative: only populate the iterator if we're not executing an antijoin rightSideIterator = rows.iterator(); } } else if (isOuterJoin || notExistsRightSide) { rightSideIterator = Iterators.singletonIterator(getEmptyRow()); } } @Override public boolean hasNext() { if (rightSideIterator != null && rightSideIterator.hasNext()) { mergedRow = JoinUtils.getMergedRow(leftRow, rightSideIterator.next(), wasRightOuterJoin, rightNumCols, leftNumCols, mergedRow); setCurrentRow(mergedRow); currentRowLocation = new HBaseRowLocation(SpliceUtils.getUniqueKey()); SpliceLogUtils.trace(LOG, "current row returned %s", currentRow); return true; } return false; } @Override public ExecRow next() { 
SpliceLogUtils.trace(LOG, "next row=%s",mergedRow); return mergedRow; } @Override public void remove() { throw new RuntimeException("Cannot Be Removed - Not Implemented!"); } } protected ExecRow getEmptyRow() throws StandardException{ throw new RuntimeException("Should only be called on outer joins"); } private Map<ByteBuffer, List<ExecRow>> retrieveRightSideCache(final SpliceRuntimeContext runtimeContext) throws StandardException { try { // Cache population is what we want here concurrency-wise: only one Callable will be invoked to // populate the cache for a given key; any other concurrent .get(k, callable) calls will block return broadcastJoinCache.get(Bytes.mapKey(uniqueSequenceID), new Callable<Map<ByteBuffer, List<ExecRow>>>() { @Override public Map<ByteBuffer, List<ExecRow>> call() throws Exception { SpliceLogUtils.trace(LOG, "Load right-side cache for BroadcastJoin, uniqueSequenceID %s",uniqueSequenceID); return loadRightSide(runtimeContext); } }); } catch (Exception e) { throw StandardException.newException(MessageId.SPLICE_GENERIC_EXCEPTION, e, "Problem loading right-hand cache for BroadcastJoin, uniqueSequenceID " + uniqueSequenceID); } } private Map<ByteBuffer, List<ExecRow>> loadRightSide(SpliceRuntimeContext runtimeContext) throws StandardException, IOException { ByteBuffer hashKey; List<ExecRow> rows; Map<ByteBuffer, List<ExecRow>> cache = new HashMap<ByteBuffer, List<ExecRow>>(); KeyMarshall hasher = KeyType.BARE; NoPutResultSet resultSet = rightResultSet.executeScan(runtimeContext); resultSet.openCore(); MultiFieldEncoder keyEncoder = MultiFieldEncoder.create(SpliceDriver.getKryoPool(),rightNumCols); try{ keyEncoder.mark(); while ((rightRow = resultSet.getNextRowCore()) != null) { keyEncoder.reset(); hasher.encodeKey(rightRow.getRowArray(),rightHashKeys,null,null,keyEncoder); hashKey = ByteBuffer.wrap(keyEncoder.build()); if ((rows = cache.get(hashKey)) != null) { // Only add additional row for same hash if we need it if (!oneRowRightSide) { 
rows.add(rightRow.getClone()); } } else { rows = new ArrayList<ExecRow>(); rows.add(rightRow.getClone()); cache.put(hashKey, rows); } } return Collections.unmodifiableMap(cache); }finally{ keyEncoder.close(); } } }
structured_derby/src/main/java/com/splicemachine/derby/impl/sql/execute/operations/BroadcastJoinOperation.java
package com.splicemachine.derby.impl.sql.execute.operations; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.RemovalListener; import com.google.common.cache.RemovalNotification; import com.google.common.collect.Iterators; import com.splicemachine.derby.hbase.SpliceDriver; import com.splicemachine.derby.iapi.sql.execute.SpliceNoPutResultSet; import com.splicemachine.derby.iapi.sql.execute.SpliceOperation; import com.splicemachine.derby.iapi.sql.execute.SpliceOperationContext; import com.splicemachine.derby.iapi.sql.execute.SpliceRuntimeContext; import com.splicemachine.derby.iapi.storage.RowProvider; import com.splicemachine.derby.impl.store.access.hbase.HBaseRowLocation; import com.splicemachine.derby.utils.SpliceUtils; import com.splicemachine.derby.utils.marshall.KeyMarshall; import com.splicemachine.derby.utils.marshall.KeyType; import com.splicemachine.derby.utils.marshall.PairDecoder; import com.splicemachine.derby.utils.marshall.RowDecoder; import com.splicemachine.encoding.MultiFieldEncoder; import com.splicemachine.utils.SpliceLogUtils; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.services.loader.GeneratedMethod; import org.apache.derby.iapi.sql.Activation; import org.apache.derby.iapi.sql.execute.ExecRow; import org.apache.derby.iapi.sql.execute.NoPutResultSet; import org.apache.derby.iapi.store.access.Qualifier; import org.apache.derby.iapi.types.SQLInteger; import org.apache.derby.shared.common.reference.MessageId; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; import org.apache.log4j.Logger; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; public class BroadcastJoinOperation extends JoinOperation { private static final long serialVersionUID = 2l; private 
static Logger LOG = Logger.getLogger(BroadcastJoinOperation.class); protected String emptyRowFunMethodName; protected boolean wasRightOuterJoin; protected Qualifier[][] qualifierProbe; protected int leftHashKeyItem; protected int[] leftHashKeys; protected int rightHashKeyItem; protected int[] rightHashKeys; protected ExecRow rightTemplate; protected static List<NodeType> nodeTypes; protected Scan reduceScan; protected RowProvider clientProvider; protected SQLInteger rowType; protected byte[] priorHash; protected List<ExecRow> rights; protected byte[] rightHash; protected Iterator<ExecRow> rightIterator; protected BroadcastNextRowIterator broadcastIterator; protected Map<ByteBuffer, List<ExecRow>> rightSideMap; protected boolean isOuterJoin = false; protected static final Cache<Integer, Map<ByteBuffer, List<ExecRow>>> broadcastJoinCache; static { nodeTypes = new ArrayList<NodeType>(); nodeTypes.add(NodeType.MAP); nodeTypes.add(NodeType.SCROLL); broadcastJoinCache = CacheBuilder.newBuilder(). 
maximumSize(50000).expireAfterWrite(10, TimeUnit.MINUTES).removalListener(new RemovalListener<Integer, Map<ByteBuffer, List<ExecRow>>>() { @Override public void onRemoval(RemovalNotification<Integer, Map<ByteBuffer, List<ExecRow>>> notification) { SpliceLogUtils.trace(LOG, "Removing unique sequence ID %s", notification.getKey()); } }).build(); } public BroadcastJoinOperation() { super(); } public BroadcastJoinOperation(SpliceOperation leftResultSet, int leftNumCols, SpliceOperation rightResultSet, int rightNumCols, int leftHashKeyItem, int rightHashKeyItem, Activation activation, GeneratedMethod restriction, int resultSetNumber, boolean oneRowRightSide, boolean notExistsRightSide, double optimizerEstimatedRowCount, double optimizerEstimatedCost, String userSuppliedOptimizerOverrides) throws StandardException { super(leftResultSet, leftNumCols, rightResultSet, rightNumCols, activation, restriction, resultSetNumber, oneRowRightSide, notExistsRightSide, optimizerEstimatedRowCount, optimizerEstimatedCost, userSuppliedOptimizerOverrides); SpliceLogUtils.trace(LOG, "instantiate"); this.leftHashKeyItem = leftHashKeyItem; this.rightHashKeyItem = rightHashKeyItem; init(SpliceOperationContext.newContext(activation)); } @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { SpliceLogUtils.trace(LOG, "readExternal"); super.readExternal(in); leftHashKeyItem = in.readInt(); rightHashKeyItem = in.readInt(); } @Override public void writeExternal(ObjectOutput out) throws IOException { SpliceLogUtils.trace(LOG, "writeExternal"); super.writeExternal(out); out.writeInt(leftHashKeyItem); out.writeInt(rightHashKeyItem); } @Override public ExecRow nextRow(SpliceRuntimeContext spliceRuntimeContext) throws StandardException, IOException { SpliceLogUtils.trace(LOG, "nextRow"); if (rightSideMap == null) rightSideMap = retrieveRightSideCache(spliceRuntimeContext); while (broadcastIterator == null || !broadcastIterator.hasNext()) { if ((leftRow = 
leftResultSet.nextRow(spliceRuntimeContext)) == null) { mergedRow = null; this.setCurrentRow(mergedRow); return mergedRow; } else { broadcastIterator = new BroadcastNextRowIterator(leftRow); } } return broadcastIterator.next(); } @Override public RowProvider getReduceRowProvider(SpliceOperation top, PairDecoder decoder, SpliceRuntimeContext spliceRuntimeContext) throws StandardException { return leftResultSet.getReduceRowProvider(top, decoder, spliceRuntimeContext); } @Override public RowProvider getMapRowProvider(SpliceOperation top, PairDecoder decoder,SpliceRuntimeContext spliceRuntimeContext) throws StandardException { return leftResultSet.getMapRowProvider(top, decoder, spliceRuntimeContext); } @Override public void init(SpliceOperationContext context) throws StandardException { SpliceLogUtils.trace(LOG, "init"); super.init(context); leftHashKeys = generateHashKeys(leftHashKeyItem); rightHashKeys = generateHashKeys(rightHashKeyItem); mergedRow = activation.getExecutionFactory().getValueRow(leftNumCols + rightNumCols); rightTemplate = activation.getExecutionFactory().getValueRow(rightNumCols); rightResultSet.init(context); } @Override public NoPutResultSet executeScan(SpliceRuntimeContext runtimeContext) throws StandardException { SpliceLogUtils.trace(LOG, "executeScan"); final List<SpliceOperation> opStack = new ArrayList<SpliceOperation>(); this.generateLeftOperationStack(opStack); SpliceLogUtils.trace(LOG, "operationStack=%s", opStack); // Get the topmost value, instead of the bottommost, in case it's you SpliceOperation regionOperation = opStack.get(opStack.size() - 1); SpliceLogUtils.trace(LOG, "regionOperation=%s", opStack); RowProvider provider; PairDecoder decoder = OperationUtils.getPairDecoder(this,runtimeContext); if (regionOperation.getNodeTypes().contains(NodeType.REDUCE)) { provider = regionOperation.getReduceRowProvider(this, decoder, runtimeContext); } else { provider = regionOperation.getMapRowProvider(this, decoder, runtimeContext); } return 
new SpliceNoPutResultSet(activation, this, provider); } @Override public ExecRow getExecRowDefinition() throws StandardException { SpliceLogUtils.trace(LOG, "getExecRowDefinition"); JoinUtils.getMergedRow(((SpliceOperation) this.leftResultSet).getExecRowDefinition(), ((SpliceOperation) this.rightResultSet).getExecRowDefinition(), wasRightOuterJoin, rightNumCols, leftNumCols, mergedRow); return mergedRow; } @Override public List<NodeType> getNodeTypes() { SpliceLogUtils.trace(LOG, "getNodeTypes"); return nodeTypes; } @Override public SpliceOperation getLeftOperation() { SpliceLogUtils.trace(LOG, "getLeftOperation"); return leftResultSet; } protected class BroadcastNextRowIterator implements Iterator<ExecRow> { protected ExecRow leftRow; protected Iterator<ExecRow> rightSideIterator = null; protected KeyMarshall leftKeyEncoder = KeyType.BARE; protected MultiFieldEncoder keyEncoder = MultiFieldEncoder.create(SpliceDriver.getKryoPool(),leftNumCols); public BroadcastNextRowIterator(ExecRow leftRow) throws StandardException { this.leftRow = leftRow; keyEncoder.reset(); leftKeyEncoder.encodeKey(leftRow.getRowArray(),leftHashKeys,null,null,keyEncoder); List<ExecRow> rows = rightSideMap.get(ByteBuffer.wrap(keyEncoder.build())); if (rows != null) { if (!notExistsRightSide) { // Sorry for the double negative: only populate the iterator if we're not executing an antijoin rightSideIterator = rows.iterator(); } } else if (isOuterJoin || notExistsRightSide) { rightSideIterator = Iterators.singletonIterator(getEmptyRow()); } } @Override public boolean hasNext() { if (rightSideIterator != null && rightSideIterator.hasNext()) { mergedRow = JoinUtils.getMergedRow(leftRow, rightSideIterator.next(), wasRightOuterJoin, rightNumCols, leftNumCols, mergedRow); setCurrentRow(mergedRow); currentRowLocation = new HBaseRowLocation(SpliceUtils.getUniqueKey()); SpliceLogUtils.trace(LOG, "current row returned %s", currentRow); return true; } return false; } @Override public ExecRow next() { 
SpliceLogUtils.trace(LOG, "next row=%s",mergedRow); return mergedRow; } @Override public void remove() { throw new RuntimeException("Cannot Be Removed - Not Implemented!"); } } protected ExecRow getEmptyRow() throws StandardException{ throw new RuntimeException("Should only be called on outer joins"); } private Map<ByteBuffer, List<ExecRow>> retrieveRightSideCache(final SpliceRuntimeContext runtimeContext) throws StandardException { try { // Cache population is what we want here concurrency-wise: only one Callable will be invoked to // populate the cache for a given key; any other concurrent .get(k, callable) calls will block return broadcastJoinCache.get(Bytes.mapKey(uniqueSequenceID), new Callable<Map<ByteBuffer, List<ExecRow>>>() { @Override public Map<ByteBuffer, List<ExecRow>> call() throws Exception { SpliceLogUtils.trace(LOG, "Load right-side cache for BroadcastJoin, uniqueSequenceID " + uniqueSequenceID); return loadRightSide(runtimeContext); } }); } catch (Exception e) { throw StandardException.newException(MessageId.SPLICE_GENERIC_EXCEPTION, e, "Problem loading right-hand cache for BroadcastJoin, uniqueSequenceID " + uniqueSequenceID); } } private Map<ByteBuffer, List<ExecRow>> loadRightSide(SpliceRuntimeContext runtimeContext) throws StandardException, IOException { ByteBuffer hashKey; List<ExecRow> rows; Map<ByteBuffer, List<ExecRow>> cache = new HashMap<ByteBuffer, List<ExecRow>>(); KeyMarshall hasher = KeyType.BARE; NoPutResultSet resultSet = rightResultSet.executeScan(runtimeContext); resultSet.openCore(); MultiFieldEncoder keyEncoder = MultiFieldEncoder.create(SpliceDriver.getKryoPool(),rightNumCols); try{ keyEncoder.mark(); while ((rightRow = resultSet.getNextRowCore()) != null) { keyEncoder.reset(); hasher.encodeKey(rightRow.getRowArray(),rightHashKeys,null,null,keyEncoder); hashKey = ByteBuffer.wrap(keyEncoder.build()); if ((rows = cache.get(hashKey)) != null) { // Only add additional row for same hash if we need it if (!oneRowRightSide) { 
rows.add(rightRow.getClone()); } } else { rows = new ArrayList<ExecRow>(); rows.add(rightRow.getClone()); cache.put(hashKey, rows); } } return Collections.unmodifiableMap(cache); }finally{ keyEncoder.close(); } } }
Fixed unique sequence ID string creation in log message
structured_derby/src/main/java/com/splicemachine/derby/impl/sql/execute/operations/BroadcastJoinOperation.java
Fixed unique sequence ID string creation in log message
Java
apache-2.0
b9ad799140a4afd00523cb8605db152d78896360
0
gbif/occurrence,gbif/occurrence,gbif/occurrence
package org.gbif.occurrence.search.es; import org.gbif.api.model.common.search.SearchConstants; import org.gbif.api.model.occurrence.search.OccurrenceSearchParameter; import org.gbif.api.model.occurrence.search.OccurrenceSearchRequest; import org.gbif.api.util.VocabularyUtils; import org.gbif.api.vocabulary.Country; import java.io.IOException; import java.util.*; import java.util.function.Function; import java.util.function.IntUnaryOperator; import java.util.stream.Collectors; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.Polygon; import com.vividsolutions.jts.io.ParseException; import com.vividsolutions.jts.io.WKTReader; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.*; import org.elasticsearch.index.query.*; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestBuilders; import org.locationtech.jts.geom.Coordinate; import static org.gbif.api.util.SearchTypeValidator.isRange; import static org.gbif.occurrence.search.es.EsQueryUtils.*; import static org.gbif.occurrence.search.es.OccurrenceEsField.COORDINATE_SHAPE; import static org.gbif.occurrence.search.es.OccurrenceEsField.FULL_TEXT; public class EsSearchRequestBuilder { private static final 
int MAX_SIZE_TERMS_AGGS = 1200000; private static final IntUnaryOperator DEFAULT_SHARD_SIZE = size -> (size * 2) + 50000; private EsSearchRequestBuilder() {} public static SearchRequest buildSearchRequest( OccurrenceSearchRequest searchRequest, boolean facetsEnabled, String index) { SearchRequest esRequest = new SearchRequest(); esRequest.indices(index); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); esRequest.source(searchSourceBuilder); // size and offset searchSourceBuilder.size(searchRequest.getLimit()); searchSourceBuilder.from((int) searchRequest.getOffset()); // sort if (Strings.isNullOrEmpty(searchRequest.getQ())) { searchSourceBuilder.sort(SortBuilders.fieldSort("_doc").order(SortOrder.DESC)); } else { searchSourceBuilder.sort(SortBuilders.scoreSort()); } // group params GroupedParams groupedParams = groupParameters(searchRequest); // add query buildQuery(groupedParams.queryParams, searchRequest.getQ()) .ifPresent(searchSourceBuilder::query); // add aggs buildAggs(searchRequest, groupedParams.postFilterParams, facetsEnabled) .ifPresent(aggsList -> aggsList.forEach(searchSourceBuilder::aggregation)); // post-filter buildPostFilter(groupedParams.postFilterParams).ifPresent(searchSourceBuilder::postFilter); return esRequest; } public static Optional<QueryBuilder> buildQueryNode(OccurrenceSearchRequest searchRequest) { return buildQuery(searchRequest.getParameters(), searchRequest.getQ()); } static SearchRequest buildSuggestQuery( String prefix, OccurrenceSearchParameter parameter, Integer limit, String index) { SearchRequest request = new SearchRequest(); request.indices(index); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); request.source(searchSourceBuilder); OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(parameter); // create suggest query searchSourceBuilder.suggest( new SuggestBuilder() .addSuggestion( esField.getFieldName(), SuggestBuilders.completionSuggestion(esField.getFieldName() + ".suggest") 
.prefix(prefix) .size(limit != null ? limit : SearchConstants.DEFAULT_SUGGEST_LIMIT) .skipDuplicates(true))); // add source field searchSourceBuilder.fetchSource(esField.getFieldName(), null); return request; } private static Optional<QueryBuilder> buildQuery( Multimap<OccurrenceSearchParameter, String> params, String qParam) { // create bool node BoolQueryBuilder bool = QueryBuilders.boolQuery(); // adding full text search parameter if (!Strings.isNullOrEmpty(qParam)) { bool.must(QueryBuilders.matchQuery(FULL_TEXT.getFieldName(), qParam)); } if (params != null && !params.isEmpty()) { // adding geometry to bool if (params.containsKey(OccurrenceSearchParameter.GEOMETRY)) { BoolQueryBuilder shouldGeometry = QueryBuilders.boolQuery(); shouldGeometry .should() .addAll( params.get(OccurrenceSearchParameter.GEOMETRY).stream() .map(EsSearchRequestBuilder::buildGeoShapeQuery) .collect(Collectors.toList())); bool.filter().add(shouldGeometry); } // adding term queries to bool bool.filter() .addAll( params.asMap().entrySet().stream() .filter(e -> Objects.nonNull(SEARCH_TO_ES_MAPPING.get(e.getKey()))) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); } return bool.must().isEmpty() && bool.filter().isEmpty() ? 
Optional.empty() : Optional.of(bool); } @VisibleForTesting static GroupedParams groupParameters(OccurrenceSearchRequest searchRequest) { GroupedParams groupedParams = new GroupedParams(); if (!searchRequest.isMultiSelectFacets() || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { groupedParams.queryParams = searchRequest.getParameters(); return groupedParams; } groupedParams.queryParams = ArrayListMultimap.create(); groupedParams.postFilterParams = ArrayListMultimap.create(); searchRequest .getParameters() .asMap() .forEach( (k, v) -> { if (searchRequest.getFacets().contains(k)) { groupedParams.postFilterParams.putAll(k, v); } else { groupedParams.queryParams.putAll(k, v); } }); return groupedParams; } private static Optional<QueryBuilder> buildPostFilter( Multimap<OccurrenceSearchParameter, String> postFilterParams) { if (postFilterParams == null || postFilterParams.isEmpty()) { return Optional.empty(); } BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); return Optional.of(bool); } private static Optional<List<AggregationBuilder>> buildAggs( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> postFilterParams, boolean facetsEnabled) { if (!facetsEnabled || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { return Optional.empty(); } if (searchRequest.isMultiSelectFacets() && postFilterParams != null && !postFilterParams.isEmpty()) { return Optional.of(buildFacetsMultiselect(searchRequest, postFilterParams)); } return Optional.of(buildFacets(searchRequest)); } private static List<AggregationBuilder> buildFacetsMultiselect( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> postFilterParams) { if (searchRequest.getFacets().size() == 1) { // same case 
as normal facets return buildFacets(searchRequest); } return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { // build filter aggs BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .filter(entry -> entry.getKey() != facetParam) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); // add filter to the aggs OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); FilterAggregationBuilder filterAggs = AggregationBuilders.filter(esField.getFieldName(), bool); // build terms aggs and add it to the filter aggs TermsAggregationBuilder termsAggs = buildTermsAggs( "filtered_" + esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); filterAggs.subAggregation(termsAggs); return filterAggs; }) .collect(Collectors.toList()); } private static List<AggregationBuilder> buildFacets(OccurrenceSearchRequest searchRequest) { return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); return buildTermsAggs( esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); }) .collect(Collectors.toList()); } private static TermsAggregationBuilder buildTermsAggs( String aggsName, OccurrenceEsField esField, int facetOffset, int facetLimit, Integer minCount) { // build aggs for the field TermsAggregationBuilder termsAggsBuilder = AggregationBuilders.terms(aggsName).field(esField.getFieldName()); // min count Optional.ofNullable(minCount).ifPresent(termsAggsBuilder::minDocCount); // aggs size int size = calculateAggsSize(esField, facetOffset, facetLimit); 
termsAggsBuilder.size(size); // aggs shard size termsAggsBuilder.shardSize(CARDINALITIES.getOrDefault(esField, DEFAULT_SHARD_SIZE.applyAsInt(size))); return termsAggsBuilder; } private static int calculateAggsSize(OccurrenceEsField esField, int facetOffset, int facetLimit) { int maxCardinality = CARDINALITIES.getOrDefault(esField, Integer.MAX_VALUE); // the limit is bounded by the max cardinality of the field int limit = Math.min(facetOffset + facetLimit, maxCardinality); // we set a maximum limit for performance reasons if (limit > MAX_SIZE_TERMS_AGGS) { throw new IllegalArgumentException( "Facets paging is only supported up to " + MAX_SIZE_TERMS_AGGS + " elements"); } return limit; } /** * Mapping parameter values into know values for Enums. * Non-enum parameter values are passed using its raw value. */ private static String parseParamValue(String value, OccurrenceSearchParameter parameter) { if (Enum.class.isAssignableFrom(parameter.type()) && !Country.class.isAssignableFrom(parameter.type())) { return VocabularyUtils.lookup(value, (Class<Enum<?>>)parameter.type()).transform(Enum::name).orNull(); } if (Boolean.class.isAssignableFrom(parameter.type())) { return value.toLowerCase(); } return value; } private static List<QueryBuilder> buildTermQuery(Collection<String> values, OccurrenceSearchParameter param, OccurrenceEsField esField) { List<QueryBuilder> queries = new ArrayList<>(); // collect queries for each value List<String> parsedValues = new ArrayList<>(); for (String value : values) { if (isRange(value)) { queries.add(buildRangeQuery(esField, value)); continue; } parsedValues.add(parseParamValue(value, param)); } if (parsedValues.size() == 1) { // single term queries.add(QueryBuilders.termQuery(esField.getFieldName(), parsedValues.get(0))); } else if (parsedValues.size() > 1) { // multi term query queries.add(QueryBuilders.termsQuery(esField.getFieldName(), parsedValues)); } return queries; } private static RangeQueryBuilder 
buildRangeQuery(OccurrenceEsField esField, String value) { String[] values = value.split(RANGE_SEPARATOR); RangeQueryBuilder builder = QueryBuilders.rangeQuery(esField.getFieldName()); if (!RANGE_WILDCARD.equals(values[0])) { builder.gte(values[0]); } if (!RANGE_WILDCARD.equals(values[1])) { builder.lte(values[1]); } return builder; } private static List<Coordinate> asCollectionOfCoordinates( com.vividsolutions.jts.geom.Coordinate[] coordinates) { return Arrays.stream(coordinates) .map(coord -> new Coordinate(coord.x, coord.y)) .collect(Collectors.toList()); } public static GeoShapeQueryBuilder buildGeoShapeQuery(String wkt) { Geometry geometry; try { geometry = new WKTReader().read(wkt); } catch (ParseException e) { throw new IllegalArgumentException(e.getMessage(), e); } Function<Polygon, PolygonBuilder> polygonToBuilder = polygon -> { PolygonBuilder polygonBuilder = new PolygonBuilder( new CoordinatesBuilder() .coordinates( asCollectionOfCoordinates(polygon.getExteriorRing().getCoordinates()))); for (int i = 0; i < polygon.getNumInteriorRing(); i++) { polygonBuilder.hole( new LineStringBuilder( new CoordinatesBuilder() .coordinates( asCollectionOfCoordinates( polygon.getInteriorRingN(i).getCoordinates())))); } return polygonBuilder; }; String type = "LinearRing".equals(geometry.getGeometryType()) ? 
"LINESTRING" : geometry.getGeometryType().toUpperCase(); ShapeBuilder shapeBuilder = null; if (("POINT").equals(type)) { shapeBuilder = new PointBuilder(geometry.getCoordinate().x, geometry.getCoordinate().y); } else if ("LINESTRING".equals(type)) { shapeBuilder = new LineStringBuilder(asCollectionOfCoordinates(geometry.getCoordinates())); } else if ("POLYGON".equals(type)) { shapeBuilder = polygonToBuilder.apply((Polygon) geometry); } else if ("MULTIPOLYGON".equals(type)) { // multipolygon MultiPolygonBuilder multiPolygonBuilder = new MultiPolygonBuilder(); for (int i = 0; i < geometry.getNumGeometries(); i++) { multiPolygonBuilder.polygon(polygonToBuilder.apply((Polygon) geometry.getGeometryN(i))); } shapeBuilder = multiPolygonBuilder; } else { throw new IllegalArgumentException(type + " shape is not supported"); } try { return QueryBuilders.geoShapeQuery(COORDINATE_SHAPE.getFieldName(), shapeBuilder) .relation(ShapeRelation.WITHIN); } catch (IOException e) { throw new IllegalStateException(e.getMessage(), e); } } @VisibleForTesting static class GroupedParams { Multimap<OccurrenceSearchParameter, String> postFilterParams; Multimap<OccurrenceSearchParameter, String> queryParams; } }
occurrence-search/src/main/java/org/gbif/occurrence/search/es/EsSearchRequestBuilder.java
package org.gbif.occurrence.search.es; import org.gbif.api.model.common.search.SearchConstants; import org.gbif.api.model.occurrence.search.OccurrenceSearchParameter; import org.gbif.api.model.occurrence.search.OccurrenceSearchRequest; import org.gbif.api.util.VocabularyUtils; import org.gbif.api.vocabulary.Country; import java.io.IOException; import java.util.*; import java.util.function.Function; import java.util.function.IntUnaryOperator; import java.util.stream.Collectors; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.Polygon; import com.vividsolutions.jts.io.ParseException; import com.vividsolutions.jts.io.WKTReader; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.*; import org.elasticsearch.index.query.*; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestBuilders; import org.locationtech.jts.geom.Coordinate; import static org.gbif.api.util.SearchTypeValidator.isRange; import static org.gbif.occurrence.search.es.EsQueryUtils.*; import static org.gbif.occurrence.search.es.OccurrenceEsField.COORDINATE_SHAPE; import static org.gbif.occurrence.search.es.OccurrenceEsField.FULL_TEXT; public class EsSearchRequestBuilder { private static final 
int MAX_SIZE_TERMS_AGGS = 1200000; private static final IntUnaryOperator DEFAULT_SHARD_SIZE = size -> (int) (size * 1.5) + 3000; private EsSearchRequestBuilder() {} public static SearchRequest buildSearchRequest( OccurrenceSearchRequest searchRequest, boolean facetsEnabled, String index) { SearchRequest esRequest = new SearchRequest(); esRequest.indices(index); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); esRequest.source(searchSourceBuilder); // size and offset searchSourceBuilder.size(searchRequest.getLimit()); searchSourceBuilder.from((int) searchRequest.getOffset()); // sort if (Strings.isNullOrEmpty(searchRequest.getQ())) { searchSourceBuilder.sort(SortBuilders.fieldSort("_doc").order(SortOrder.DESC)); } else { searchSourceBuilder.sort(SortBuilders.scoreSort()); } // group params GroupedParams groupedParams = groupParameters(searchRequest); // add query buildQuery(groupedParams.queryParams, searchRequest.getQ()) .ifPresent(searchSourceBuilder::query); // add aggs buildAggs(searchRequest, groupedParams.postFilterParams, facetsEnabled) .ifPresent(aggsList -> aggsList.forEach(searchSourceBuilder::aggregation)); // post-filter buildPostFilter(groupedParams.postFilterParams).ifPresent(searchSourceBuilder::postFilter); return esRequest; } public static Optional<QueryBuilder> buildQueryNode(OccurrenceSearchRequest searchRequest) { return buildQuery(searchRequest.getParameters(), searchRequest.getQ()); } static SearchRequest buildSuggestQuery( String prefix, OccurrenceSearchParameter parameter, Integer limit, String index) { SearchRequest request = new SearchRequest(); request.indices(index); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); request.source(searchSourceBuilder); OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(parameter); // create suggest query searchSourceBuilder.suggest( new SuggestBuilder() .addSuggestion( esField.getFieldName(), SuggestBuilders.completionSuggestion(esField.getFieldName() + ".suggest") 
.prefix(prefix) .size(limit != null ? limit : SearchConstants.DEFAULT_SUGGEST_LIMIT) .skipDuplicates(true))); // add source field searchSourceBuilder.fetchSource(esField.getFieldName(), null); return request; } private static Optional<QueryBuilder> buildQuery( Multimap<OccurrenceSearchParameter, String> params, String qParam) { // create bool node BoolQueryBuilder bool = QueryBuilders.boolQuery(); // adding full text search parameter if (!Strings.isNullOrEmpty(qParam)) { bool.must(QueryBuilders.matchQuery(FULL_TEXT.getFieldName(), qParam)); } if (params != null && !params.isEmpty()) { // adding geometry to bool if (params.containsKey(OccurrenceSearchParameter.GEOMETRY)) { BoolQueryBuilder shouldGeometry = QueryBuilders.boolQuery(); shouldGeometry .should() .addAll( params.get(OccurrenceSearchParameter.GEOMETRY).stream() .map(EsSearchRequestBuilder::buildGeoShapeQuery) .collect(Collectors.toList())); bool.filter().add(shouldGeometry); } // adding term queries to bool bool.filter() .addAll( params.asMap().entrySet().stream() .filter(e -> Objects.nonNull(SEARCH_TO_ES_MAPPING.get(e.getKey()))) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); } return bool.must().isEmpty() && bool.filter().isEmpty() ? 
Optional.empty() : Optional.of(bool); } @VisibleForTesting static GroupedParams groupParameters(OccurrenceSearchRequest searchRequest) { GroupedParams groupedParams = new GroupedParams(); if (!searchRequest.isMultiSelectFacets() || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { groupedParams.queryParams = searchRequest.getParameters(); return groupedParams; } groupedParams.queryParams = ArrayListMultimap.create(); groupedParams.postFilterParams = ArrayListMultimap.create(); searchRequest .getParameters() .asMap() .forEach( (k, v) -> { if (searchRequest.getFacets().contains(k)) { groupedParams.postFilterParams.putAll(k, v); } else { groupedParams.queryParams.putAll(k, v); } }); return groupedParams; } private static Optional<QueryBuilder> buildPostFilter( Multimap<OccurrenceSearchParameter, String> postFilterParams) { if (postFilterParams == null || postFilterParams.isEmpty()) { return Optional.empty(); } BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); return Optional.of(bool); } private static Optional<List<AggregationBuilder>> buildAggs( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> postFilterParams, boolean facetsEnabled) { if (!facetsEnabled || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { return Optional.empty(); } if (searchRequest.isMultiSelectFacets() && postFilterParams != null && !postFilterParams.isEmpty()) { return Optional.of(buildFacetsMultiselect(searchRequest, postFilterParams)); } return Optional.of(buildFacets(searchRequest)); } private static List<AggregationBuilder> buildFacetsMultiselect( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> postFilterParams) { if (searchRequest.getFacets().size() == 1) { // same case 
as normal facets return buildFacets(searchRequest); } return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { // build filter aggs BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .filter(entry -> entry.getKey() != facetParam) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); // add filter to the aggs OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); FilterAggregationBuilder filterAggs = AggregationBuilders.filter(esField.getFieldName(), bool); // build terms aggs and add it to the filter aggs TermsAggregationBuilder termsAggs = buildTermsAggs( "filtered_" + esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); filterAggs.subAggregation(termsAggs); return filterAggs; }) .collect(Collectors.toList()); } private static List<AggregationBuilder> buildFacets(OccurrenceSearchRequest searchRequest) { return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); return buildTermsAggs( esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); }) .collect(Collectors.toList()); } private static TermsAggregationBuilder buildTermsAggs( String aggsName, OccurrenceEsField esField, int facetOffset, int facetLimit, Integer minCount) { // build aggs for the field TermsAggregationBuilder termsAggsBuilder = AggregationBuilders.terms(aggsName).field(esField.getFieldName()); // min count Optional.ofNullable(minCount).ifPresent(termsAggsBuilder::minDocCount); // aggs size int size = calculateAggsSize(esField, facetOffset, facetLimit); 
termsAggsBuilder.size(size); // aggs shard size termsAggsBuilder.shardSize(CARDINALITIES.getOrDefault(esField, DEFAULT_SHARD_SIZE.applyAsInt(size))); return termsAggsBuilder; } private static int calculateAggsSize(OccurrenceEsField esField, int facetOffset, int facetLimit) { int maxCardinality = CARDINALITIES.getOrDefault(esField, Integer.MAX_VALUE); // the limit is bounded by the max cardinality of the field int limit = Math.min(facetOffset + facetLimit, maxCardinality); // we set a maximum limit for performance reasons if (limit > MAX_SIZE_TERMS_AGGS) { throw new IllegalArgumentException( "Facets paging is only supported up to " + MAX_SIZE_TERMS_AGGS + " elements"); } return limit; } /** * Mapping parameter values into know values for Enums. * Non-enum parameter values are passed using its raw value. */ private static String parseParamValue(String value, OccurrenceSearchParameter parameter) { if (Enum.class.isAssignableFrom(parameter.type()) && !Country.class.isAssignableFrom(parameter.type())) { return VocabularyUtils.lookup(value, (Class<Enum<?>>)parameter.type()).transform(Enum::name).orNull(); } if (Boolean.class.isAssignableFrom(parameter.type())) { return value.toLowerCase(); } return value; } private static List<QueryBuilder> buildTermQuery(Collection<String> values, OccurrenceSearchParameter param, OccurrenceEsField esField) { List<QueryBuilder> queries = new ArrayList<>(); // collect queries for each value List<String> parsedValues = new ArrayList<>(); for (String value : values) { if (isRange(value)) { queries.add(buildRangeQuery(esField, value)); continue; } parsedValues.add(parseParamValue(value, param)); } if (parsedValues.size() == 1) { // single term queries.add(QueryBuilders.termQuery(esField.getFieldName(), parsedValues.get(0))); } else if (parsedValues.size() > 1) { // multi term query queries.add(QueryBuilders.termsQuery(esField.getFieldName(), parsedValues)); } return queries; } private static RangeQueryBuilder 
buildRangeQuery(OccurrenceEsField esField, String value) { String[] values = value.split(RANGE_SEPARATOR); RangeQueryBuilder builder = QueryBuilders.rangeQuery(esField.getFieldName()); if (!RANGE_WILDCARD.equals(values[0])) { builder.gte(values[0]); } if (!RANGE_WILDCARD.equals(values[1])) { builder.lte(values[1]); } return builder; } private static List<Coordinate> asCollectionOfCoordinates( com.vividsolutions.jts.geom.Coordinate[] coordinates) { return Arrays.stream(coordinates) .map(coord -> new Coordinate(coord.x, coord.y)) .collect(Collectors.toList()); } public static GeoShapeQueryBuilder buildGeoShapeQuery(String wkt) { Geometry geometry; try { geometry = new WKTReader().read(wkt); } catch (ParseException e) { throw new IllegalArgumentException(e.getMessage(), e); } Function<Polygon, PolygonBuilder> polygonToBuilder = polygon -> { PolygonBuilder polygonBuilder = new PolygonBuilder( new CoordinatesBuilder() .coordinates( asCollectionOfCoordinates(polygon.getExteriorRing().getCoordinates()))); for (int i = 0; i < polygon.getNumInteriorRing(); i++) { polygonBuilder.hole( new LineStringBuilder( new CoordinatesBuilder() .coordinates( asCollectionOfCoordinates( polygon.getInteriorRingN(i).getCoordinates())))); } return polygonBuilder; }; String type = "LinearRing".equals(geometry.getGeometryType()) ? 
"LINESTRING" : geometry.getGeometryType().toUpperCase(); ShapeBuilder shapeBuilder = null; if (("POINT").equals(type)) { shapeBuilder = new PointBuilder(geometry.getCoordinate().x, geometry.getCoordinate().y); } else if ("LINESTRING".equals(type)) { shapeBuilder = new LineStringBuilder(asCollectionOfCoordinates(geometry.getCoordinates())); } else if ("POLYGON".equals(type)) { shapeBuilder = polygonToBuilder.apply((Polygon) geometry); } else if ("MULTIPOLYGON".equals(type)) { // multipolygon MultiPolygonBuilder multiPolygonBuilder = new MultiPolygonBuilder(); for (int i = 0; i < geometry.getNumGeometries(); i++) { multiPolygonBuilder.polygon(polygonToBuilder.apply((Polygon) geometry.getGeometryN(i))); } shapeBuilder = multiPolygonBuilder; } else { throw new IllegalArgumentException(type + " shape is not supported"); } try { return QueryBuilders.geoShapeQuery(COORDINATE_SHAPE.getFieldName(), shapeBuilder) .relation(ShapeRelation.WITHIN); } catch (IOException e) { throw new IllegalStateException(e.getMessage(), e); } } @VisibleForTesting static class GroupedParams { Multimap<OccurrenceSearchParameter, String> postFilterParams; Multimap<OccurrenceSearchParameter, String> queryParams; } }
updated default shard size for ES aggs to improve accuracy in results
occurrence-search/src/main/java/org/gbif/occurrence/search/es/EsSearchRequestBuilder.java
updated default shard size for ES aggs to improve accuracy in results
Java
apache-2.0
e0d876963ce1161ae6946ef05964d2dd90e5a415
0
socialsensor/socialsensor-stream-manager,MKLab-ITI/mklab-stream-manager,socialsensor/socialsensor-stream-manager,socialsensor/socialsensor-stream-manager
package eu.socialsensor.sfc.streams.store; import java.io.IOException; import org.apache.log4j.Logger; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrServer; import eu.socialsensor.framework.client.search.solr.SolrItemHandler; import eu.socialsensor.framework.client.search.solr.SolrMediaItemHandler; import eu.socialsensor.framework.client.search.solr.SolrNewsFeedHandler; import eu.socialsensor.framework.common.domain.Item; import eu.socialsensor.framework.common.domain.MediaItem; import eu.socialsensor.sfc.streams.StorageConfiguration; /** * Class for storing items to solr * @author manosetro * @email [email protected] * */ public class SolrStorage implements StreamUpdateStorage { private Logger logger = Logger.getLogger(SolrStorage.class); private static final String HOSTNAME = "solr.hostname"; private static final String SERVICE = "solr.service"; private static final String ITEMS_COLLECTION = "solr.items.collection"; private static final String MEDIAITEMS_COLLECTION = "solr.mediaitems.collection"; private static final String NEWSFEED_COLLECTION = "solr.newsfeed.collection"; private static final String FACEBOOK_ITEMS_COLLECTION = "solr.facebook.items.collection"; private static final String TWITTER_ITEMS_COLLECTION = "solr.twitter.items.collection"; private String hostname, service; private String itemsCollection = null; private String mediaItemsCollection = null; private String newsFeedCollection = null; private String facebookItemsCollection = null; private String twitterItemsCollection = null; private String storageName = "Solr"; private SolrItemHandler solrItemHandler = null; private SolrItemHandler solrFacebookItemHandler = null; private SolrItemHandler solrTwitterItemHandler = null; private SolrMediaItemHandler solrMediaHandler = null; private SolrNewsFeedHandler solrNewsFeedHandler = null; public SolrStorage(StorageConfiguration config) throws 
IOException { this.hostname = config.getParameter(SolrStorage.HOSTNAME); this.service = config.getParameter(SolrStorage.SERVICE); this.itemsCollection = config.getParameter(SolrStorage.ITEMS_COLLECTION); this.mediaItemsCollection = config.getParameter(SolrStorage.MEDIAITEMS_COLLECTION); this.newsFeedCollection = config.getParameter(SolrStorage.NEWSFEED_COLLECTION); this.facebookItemsCollection = config.getParameter(SolrStorage.FACEBOOK_ITEMS_COLLECTION); this.twitterItemsCollection = config.getParameter(SolrStorage.TWITTER_ITEMS_COLLECTION); } public SolrItemHandler getItemHandler() { return solrItemHandler; } public SolrItemHandler getFacebookItemHandler() { return solrFacebookItemHandler; } public SolrItemHandler getTwitterItemHandler() { return solrTwitterItemHandler; } public SolrMediaItemHandler getMediaItemHandler() { return solrMediaHandler; } public SolrNewsFeedHandler getNewsFeedHandler() { return solrNewsFeedHandler; } @Override public boolean open(){ try { if(itemsCollection != null) { solrItemHandler = SolrItemHandler.getInstance(hostname+"/"+service+"/"+itemsCollection); } if(mediaItemsCollection != null) { solrMediaHandler = SolrMediaItemHandler.getInstance(hostname+"/"+service+"/"+mediaItemsCollection); } if(newsFeedCollection != null) { solrNewsFeedHandler = SolrNewsFeedHandler.getInstance(hostname+"/"+service+"/"+newsFeedCollection); } if(facebookItemsCollection != null) { solrFacebookItemHandler = SolrItemHandler.getInstance(hostname+"/"+service+"/"+facebookItemsCollection); } if(twitterItemsCollection != null) { solrTwitterItemHandler = SolrItemHandler.getInstance(hostname+"/"+service+"/"+twitterItemsCollection); } } catch (Exception e) { // TODO Auto-generated catch block return false; } return true; } @Override public void store(Item item) throws IOException { // Index only original Items and MediaItems come from original Items if(!item.isOriginal()) return; if(solrItemHandler != null) { solrItemHandler.insertItem(item); } 
if(solrFacebookItemHandler != null && item.getStreamId().equals("Facebook")) { solrFacebookItemHandler.insertItem(item); } if(solrTwitterItemHandler != null && item.getStreamId().equals("Twitter")) { solrTwitterItemHandler.insertItem(item); } if(solrNewsFeedHandler != null){ solrNewsFeedHandler.insertItem(item); } if(solrMediaHandler != null) { for(MediaItem mediaItem : item.getMediaItems()) { MediaItem mi = solrMediaHandler.getSolrMediaItem(mediaItem.getId()); if(mi==null) { solrMediaHandler.insertMediaItem(mediaItem); } else { solrMediaHandler.insertMediaItem(mi); } } } } @Override public void update(Item update) throws IOException { store(update); } @Override public boolean delete(String itemId) throws IOException { //logger.info("Delete item with id " + itemId + " from Solr."); solrItemHandler.deleteItem(itemId); return true; } @Override public boolean checkStatus(StreamUpdateStorage storage) { if(itemsCollection != null){ try { solrItemHandler.checkServerStatus(); return true; } catch (Exception e) { // TODO Auto-generated catch block return false; } } if(mediaItemsCollection != null){ try { solrMediaHandler.checkServerStatus(); return true; } catch (Exception e) { // TODO Auto-generated catch block return false; } } if(newsFeedCollection != null){ try { solrNewsFeedHandler.checkServerStatus(); return true; } catch (Exception e) { // TODO Auto-generated catch block return false; } } return false; } @Override public void close() { try { commit(); } catch (IOException e) { e.printStackTrace(); } } @Override public void updateTimeslot() { try { commit(); } catch (Exception e) { logger.error(e); } } private void commit() throws IOException { try { commit(); } catch (Exception e) { logger.error(e); } } @Override public String getStorageName(){ return this.storageName; } public static void main(String[] args) throws IOException { } }
src/main/java/eu/socialsensor/sfc/streams/store/SolrStorage.java
package eu.socialsensor.sfc.streams.store; import java.io.IOException; import org.apache.log4j.Logger; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrServer; import eu.socialsensor.framework.client.search.solr.SolrItemHandler; import eu.socialsensor.framework.client.search.solr.SolrMediaItemHandler; import eu.socialsensor.framework.client.search.solr.SolrNewsFeedHandler; import eu.socialsensor.framework.common.domain.Item; import eu.socialsensor.framework.common.domain.MediaItem; import eu.socialsensor.sfc.streams.StorageConfiguration; /** * Class for storing items to solr * @author manosetro * @email [email protected] * */ public class SolrStorage implements StreamUpdateStorage { private Logger logger = Logger.getLogger(SolrStorage.class); private static final String HOSTNAME = "solr.hostname"; private static final String SERVICE = "solr.service"; private static final String ITEMS_COLLECTION = "solr.items.collection"; private static final String MEDIAITEMS_COLLECTION = "solr.mediaitems.collection"; private static final String NEWSFEED_COLLECTION = "solr.newsfeed.collection"; private static final String FACEBOOK_ITEMS_COLLECTION = "solr.facebook.items.collection"; private static final String TWITTER_ITEMS_COLLECTION = "solr.twitter.items.collection"; private String hostname, service; private String itemsCollection = null; private String mediaItemsCollection = null; private String newsFeedCollection = null; private String facebookItemsCollection = null; private String twitterItemsCollection = null; private String storageName = "Solr"; private SolrItemHandler solrItemHandler = null; private SolrItemHandler solrFacebookItemHandler = null; private SolrItemHandler solrTwitterItemHandler = null; private SolrMediaItemHandler solrMediaHandler = null; private SolrNewsFeedHandler solrNewsFeedHandler = null; public SolrStorage(StorageConfiguration config) throws 
IOException { this.hostname = config.getParameter(SolrStorage.HOSTNAME); this.service = config.getParameter(SolrStorage.SERVICE); this.itemsCollection = config.getParameter(SolrStorage.ITEMS_COLLECTION); this.mediaItemsCollection = config.getParameter(SolrStorage.MEDIAITEMS_COLLECTION); this.newsFeedCollection = config.getParameter(SolrStorage.NEWSFEED_COLLECTION); this.facebookItemsCollection = config.getParameter(SolrStorage.FACEBOOK_ITEMS_COLLECTION); this.twitterItemsCollection = config.getParameter(SolrStorage.TWITTER_ITEMS_COLLECTION); } public SolrItemHandler getItemHandler() { return solrItemHandler; } public SolrItemHandler getFacebookItemHandler() { return solrFacebookItemHandler; } public SolrItemHandler getTwitterItemHandler() { return solrTwitterItemHandler; } public SolrMediaItemHandler getMediaItemHandler() { return solrMediaHandler; } public SolrNewsFeedHandler getNewsFeedHandler() { return solrNewsFeedHandler; } @Override public boolean open(){ try { if(itemsCollection != null) { solrItemHandler = SolrItemHandler.getInstance(hostname+"/"+service+"/"+itemsCollection); } if(mediaItemsCollection != null) { solrMediaHandler = SolrMediaItemHandler.getInstance(hostname+"/"+service+"/"+mediaItemsCollection); } if(newsFeedCollection != null) { solrNewsFeedHandler = SolrNewsFeedHandler.getInstance(hostname+"/"+service+"/"+newsFeedCollection); } if(facebookItemsCollection != null) { solrFacebookItemHandler = SolrItemHandler.getInstance(hostname+"/"+service+"/"+facebookItemsCollection); } if(twitterItemsCollection != null) { solrTwitterItemHandler = SolrItemHandler.getInstance(hostname+"/"+service+"/"+twitterItemsCollection); } } catch (Exception e) { // TODO Auto-generated catch block return false; } return true; } @Override public void store(Item item) throws IOException { // Index only original Items and MediaItems come from original Items if(!item.isOriginal()) return; if(solrItemHandler != null) { solrItemHandler.insertItem(item); } 
if(solrFacebookItemHandler != null && item.getStreamId().equals("Facebook")) { solrFacebookItemHandler.insertItem(item); } if(solrTwitterItemHandler != null && item.getStreamId().equals("Twitter")) { solrTwitterItemHandler.insertItem(item); } if(solrNewsFeedHandler != null){ solrNewsFeedHandler.insertItem(item); } if(solrMediaHandler != null) { for(MediaItem mediaItem : item.getMediaItems()) { MediaItem mi = solrMediaHandler.getSolrMediaItem(mediaItem.getId()); if(mi==null) { solrMediaHandler.insertMediaItem(mediaItem); } else { solrMediaHandler.insertMediaItem(mi); } } } } @Override public void update(Item update) throws IOException { store(update); } @Override public boolean delete(String itemId) throws IOException { //logger.info("Delete item with id " + itemId + " from Solr."); solrItemHandler.deleteItem(itemId); return true; } @Override public boolean checkStatus(StreamUpdateStorage storage) { if(itemsCollection != null){ try { solrItemHandler.checkServerStatus(); return true; } catch (Exception e) { // TODO Auto-generated catch block return false; } } if(mediaItemsCollection != null){ try { solrMediaHandler.checkServerStatus(); return true; } catch (Exception e) { // TODO Auto-generated catch block return false; } } if(newsFeedCollection != null){ try { solrNewsFeedHandler.checkServerStatus(); return true; } catch (Exception e) { // TODO Auto-generated catch block return false; } } return false; } @Override public void close() { try { commit(); } catch (IOException e) { e.printStackTrace(); } } @Override public void updateTimeslot() { try { commit(); } catch (Exception e) { logger.error(e); } } private void commit() throws IOException { try { commit(); } catch (Exception e) { logger.error(e); } } @Override public String getStorageName(){ return this.storageName; } public static void main(String[] args) throws IOException { } }
changes to store specifically facebook and twitter messages in solr
src/main/java/eu/socialsensor/sfc/streams/store/SolrStorage.java
changes to store specifically facebook and twitter messages in solr
Java
apache-2.0
1fdda939043e798e16101a313e9e44647cca9dec
0
soujava/Camel-Pax-Exam-Demo,ANierbeck/Camel-Pax-Exam-Demo
package de.nierbeck.camel.exam.demo.control.route; import static org.ops4j.pax.exam.CoreOptions.maven; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.CoreOptions.streamBundle; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.features; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.karafDistributionConfiguration; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.keepRuntimeFolder; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.logLevel; import static org.ops4j.pax.tinybundles.core.TinyBundles.bundle; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.PrintStream; import java.util.Dictionary; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import javax.jms.ConnectionFactory; import javax.sql.DataSource; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.Processor; import org.apache.camel.ProducerTemplate; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.junit4.CamelTestSupport; import org.apache.felix.service.command.CommandProcessor; import org.apache.felix.service.command.CommandSession; import org.apache.karaf.features.FeaturesService; import org.junit.Test; import org.junit.runner.RunWith; import org.ops4j.pax.exam.Configuration; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.ProbeBuilder; import org.ops4j.pax.exam.TestProbeBuilder; import org.ops4j.pax.exam.junit.PaxExam; import org.ops4j.pax.exam.karaf.options.KarafDistributionOption; import org.ops4j.pax.exam.karaf.options.LogLevelOption.LogLevel; import org.osgi.framework.BundleContext; import org.osgi.framework.Constants; import org.osgi.framework.Filter; 
import org.osgi.framework.FrameworkUtil; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.ServiceReference; import org.osgi.util.tracker.ServiceTracker; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import de.nierbeck.camel.exam.demo.control.CamelMessageBean; import de.nierbeck.camel.exam.demo.control.JmsDestinations; import de.nierbeck.camel.exam.demo.control.RouteID; import de.nierbeck.camel.exam.demo.control.WebServiceOrder; import de.nierbeck.camel.exam.demo.control.internal.OrderWebServiceRoute; import de.nierbeck.camel.exam.demo.control.internal.OutMessageProcessor; import de.nierbeck.camel.exam.demo.control.internal.converter.MessageLogConverter; import de.nierbeck.camel.exam.demo.entities.CamelMessage; import de.nierbeck.camel.exam.demo.entities.dao.CamelMessageStoreDao; import de.nierbeck.camel.exam.demo.testutil.TestUtility; @RunWith(PaxExam.class) public class KarafRoutingTest extends CamelTestSupport { protected transient Logger log = LoggerFactory.getLogger(getClass()); ExecutorService executor = Executors.newCachedThreadPool(); static final Long COMMAND_TIMEOUT = 10000L; static final Long DEFAULT_TIMEOUT = 20000L; static final Long SERVICE_TIMEOUT = 30000L; @Inject protected FeaturesService featuresService; @Inject protected BundleContext bundleContext; @Inject protected DataSource dataSource; @Inject protected CamelMessageStoreDao orderMergingDao; @Inject private ConnectionFactory connectionFactory; private CamelContext controlContext; private CamelContext testContext; @Configuration public static Option[] configure() throws Exception { return new Option[] { karafDistributionConfiguration() .frameworkUrl( maven().groupId("org.apache.karaf").artifactId("apache-karaf").type("zip") .versionAsInProject()).useDeployFolder(false).karafVersion("3.0.0") .unpackDirectory(new File("target/paxexam/unpack/")), logLevel(LogLevel.WARN), features( maven().groupId("org.apache.karaf.features").artifactId("standard").type("xml") 
.classifier("features").versionAsInProject(), "http-whiteboard"), features( maven().groupId("org.apache.karaf.features").artifactId("enterprise").type("xml") .classifier("features").versionAsInProject(), "transaction", "jpa", "jndi"), features( maven().groupId("org.apache.activemq").artifactId("activemq-karaf").type("xml") .classifier("features").versionAsInProject(), "activemq-blueprint", "activemq-camel"), features( maven().groupId("org.apache.cxf.karaf").artifactId("apache-cxf").type("xml") .classifier("features").versionAsInProject(), "cxf-jaxws"), features( maven().groupId("org.apache.camel.karaf").artifactId("apache-camel").type("xml") .classifier("features").versionAsInProject(), "camel-blueprint", "camel-jms", "camel-jpa", "camel-mvel", "camel-jdbc", "camel-cxf", "camel-test"), KarafDistributionOption.editConfigurationFilePut("etc/org.ops4j.pax.url.mvn.cfg", "org.ops4j.pax.url.mvn.proxySupport", "true"), keepRuntimeFolder(), mavenBundle().groupId("com.h2database").artifactId("h2").version("1.3.167"), mavenBundle().groupId("de.nierbeck.camel.exam.demo").artifactId("entities").versionAsInProject(), mavenBundle().groupId("org.ops4j.pax.tipi").artifactId("org.ops4j.pax.tipi.hamcrest.core") .versionAsInProject(), streamBundle( bundle().add("OSGI-INF/blueprint/datasource.xml", new File("src/sample/resources/datasource.xml").toURL()) .set(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.datasource") .set(Constants.DYNAMICIMPORT_PACKAGE, "*").build()).start(), streamBundle( bundle().add("OSGI-INF/blueprint/mqbroker.xml", new File("src/sample/resources/mqbroker-test.xml").toURL()) .set(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.broker") .set(Constants.DYNAMICIMPORT_PACKAGE, "*").build()).start(), streamBundle( bundle().add(JmsDestinations.class) .add(WebServiceOrder.class) .add(CamelMessageBean.class) .add(RouteID.class) .add(OrderWebServiceRoute.class) .add(OutMessageProcessor.class) .add(MessageLogConverter.class) 
.add("OSGI-INF/blueprint/camel-main-context.xml", new File("src/main/resources/OSGI-INF/blueprint/camel-context.xml") .toURL()) .add("OSGI-INF/blueprint/jms-context.xml", new File("src/main/resources/OSGI-INF/blueprint/jms-config.xml").toURL()) .add("wsdl/WebServiceOrder.wsdl", new File("target/generated/wsdl/WebServiceOrder.wsdl").toURL()) .set(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.route-control") .set(Constants.DYNAMICIMPORT_PACKAGE, "*") .set(Constants.EXPORT_PACKAGE, "wsdl, de.nierbeck.camel.exam.demo.control").build()) .start() }; } /** * @param probe * @return */ @ProbeBuilder public TestProbeBuilder probeConfiguration(TestProbeBuilder probe) { // makes sure the generated Test-Bundle contains this import! probe.setHeader(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.route-control-test"); probe.setHeader(Constants.DYNAMICIMPORT_PACKAGE, "de.nierbeck.camel.exam.demo.control,*,org.apache.felix.service.*;status=provisional"); return probe; } @Override public boolean isCreateCamelContextPerClass() { // we override this method and return true, to tell Camel test-kit that // it should only create CamelContext once (per class), so we will // re-use the CamelContext between each test method in this class return true; } @Override protected void doPreSetup() throws Exception { controlContext = getOsgiService(CamelContext.class, "(camel.context.name=route-control)", 10000); assertNotNull(controlContext); testContext = getOsgiService(CamelContext.class, "(camel.context.name=route-test)", 10000); assertNotNull(testContext); for (CamelMessage orderMerging : orderMergingDao.findAll()) { orderMergingDao.makeTransient(orderMerging); } } @Test public void test() throws Exception { assertTrue(featuresService.isInstalled(featuresService.getFeature("camel-jpa"))); assertTrue(featuresService.isInstalled(featuresService.getFeature("camel-core"))); assertTrue(featuresService.isInstalled(featuresService.getFeature("camel-blueprint"))); 
assertTrue(featuresService.isInstalled(featuresService.getFeature("activemq-camel"))); assertTrue(featuresService.isInstalled(featuresService.getFeature("http-whiteboard"))); // Use these for debugging when test doesn't work right. // System.err.println(executeCommand("features:list")); // System.err.println(executeCommand("camel:route-list")); // System.err.println(executeCommand("list")); String command = executeCommand("camel:context-list"); System.err.println(command); assertTrue("Doesn't contain desired camel-contexts", command.contains("route-control")); assertTrue("Doesn't contain desired camel-contexts", command.contains("route-test")); } @Test public final void testSendMessage() throws Exception { final CamelMessageBean body = new CamelMessageBean(); body.setMessage("Testmessage"); body.setTmstamp(Long.toString(System.currentTimeMillis())); MockEndpoint mockEndpoint = (MockEndpoint) testContext.getEndpoint("mock:OrderRoute"); mockEndpoint.expectedMessageCount(1); ProducerTemplate template = testContext.createProducerTemplate(); template.start(); template.send("direct:start", new Processor() { public void process(Exchange exchange) { Message in = exchange.getIn(); in.setBody(body); } }); mockEndpoint.assertIsSatisfied(2500); } // Below are methods used for testing --> should be moved outside of // testclass /** * Executes a shell command and returns output as a String. Commands have a * default timeout of 10 seconds. * * @param command * @return */ protected String executeCommand(final String command) { return executeCommand(command, COMMAND_TIMEOUT, false); } /** * Executes a shell command and returns output as a String. Commands have a * default timeout of 10 seconds. * * @param command * The command to execute. * @param timeout * The amount of time in millis to wait for the command to * execute. * @param silent * Specifies if the command should be displayed in the screen. 
* @return */ protected String executeCommand(final String command, final Long timeout, final Boolean silent) { String response; final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); final PrintStream printStream = new PrintStream(byteArrayOutputStream); final CommandProcessor commandProcessor = getOsgiService(CommandProcessor.class); final CommandSession commandSession = commandProcessor.createSession(System.in, printStream, System.err); FutureTask<String> commandFuture = new FutureTask<String>(new Callable<String>() { public String call() { try { if (!silent) { System.err.println(command); } commandSession.execute(command); } catch (Exception e) { e.printStackTrace(System.err); } printStream.flush(); return byteArrayOutputStream.toString(); } }); try { executor.submit(commandFuture); response = commandFuture.get(timeout, TimeUnit.MILLISECONDS); } catch (Exception e) { e.printStackTrace(System.err); response = "SHELL COMMAND TIMED OUT: "; } return response; } /** * Executes multiple commands inside a Single Session. Commands have a * default timeout of 10 seconds. * * @param commands * @return */ protected String executeCommands(final String... 
commands) { String response; final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); final PrintStream printStream = new PrintStream(byteArrayOutputStream); final CommandProcessor commandProcessor = getOsgiService(CommandProcessor.class); final CommandSession commandSession = commandProcessor.createSession(System.in, printStream, System.err); FutureTask<String> commandFuture = new FutureTask<String>(new Callable<String>() { public String call() { try { for (String command : commands) { System.err.println(command); commandSession.execute(command); } } catch (Exception e) { e.printStackTrace(System.err); } return byteArrayOutputStream.toString(); } }); try { executor.submit(commandFuture); response = commandFuture.get(COMMAND_TIMEOUT, TimeUnit.MILLISECONDS); } catch (Exception e) { e.printStackTrace(System.err); response = "SHELL COMMAND TIMED OUT: "; } return response; } protected <T> T getOsgiService(Class<T> type, long timeout) { return getOsgiService(type, null, timeout); } protected <T> T getOsgiService(Class<T> type) { return getOsgiService(type, null, SERVICE_TIMEOUT); } protected <T> T getOsgiService(Class<T> type, String filter, long timeout) { ServiceTracker tracker = null; try { String flt; if (filter != null) { if (filter.startsWith("(")) { flt = "(&(" + Constants.OBJECTCLASS + "=" + type.getName() + ")" + filter + ")"; } else { flt = "(&(" + Constants.OBJECTCLASS + "=" + type.getName() + ")(" + filter + "))"; } } else { flt = "(" + Constants.OBJECTCLASS + "=" + type.getName() + ")"; } Filter osgiFilter = FrameworkUtil.createFilter(flt); tracker = new ServiceTracker(bundleContext, osgiFilter, null); tracker.open(true); // Note that the tracker is not closed to keep the reference // This is buggy, as the service reference may change i think Object svc = type.cast(tracker.waitForService(timeout)); if (svc == null) { Dictionary dic = bundleContext.getBundle().getHeaders(); System.err.println("Test bundle headers: " + 
TestUtility.explode(dic)); for (ServiceReference ref : TestUtility.asCollection(bundleContext.getAllServiceReferences(null, null))) { System.err.println("ServiceReference: " + ref); } for (ServiceReference ref : TestUtility.asCollection(bundleContext.getAllServiceReferences(null, flt))) { System.err.println("Filtered ServiceReference: " + ref); } throw new RuntimeException("Gave up waiting for service " + flt); } return type.cast(svc); } catch (InvalidSyntaxException e) { throw new IllegalArgumentException("Invalid filter", e); } catch (InterruptedException e) { throw new RuntimeException(e); } } }
route-control/src/test/java/de/nierbeck/camel/exam/demo/control/route/KarafRoutingTest.java
package de.nierbeck.camel.exam.demo.control.route; import static org.ops4j.pax.exam.CoreOptions.maven; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.CoreOptions.streamBundle; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.features; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.karafDistributionConfiguration; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.keepRuntimeFolder; import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.logLevel; import static org.ops4j.pax.tinybundles.core.TinyBundles.bundle; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.PrintStream; import java.util.Dictionary; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import javax.jms.ConnectionFactory; import javax.sql.DataSource; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.Processor; import org.apache.camel.ProducerTemplate; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.junit4.CamelTestSupport; import org.apache.felix.service.command.CommandProcessor; import org.apache.felix.service.command.CommandSession; import org.apache.karaf.features.FeaturesService; import org.junit.Test; import org.junit.runner.RunWith; import org.ops4j.pax.exam.Configuration; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.ProbeBuilder; import org.ops4j.pax.exam.TestProbeBuilder; import org.ops4j.pax.exam.junit.PaxExam; import org.ops4j.pax.exam.karaf.options.KarafDistributionOption; import org.ops4j.pax.exam.karaf.options.LogLevelOption.LogLevel; import org.osgi.framework.BundleContext; import org.osgi.framework.Constants; import org.osgi.framework.Filter; 
import org.osgi.framework.FrameworkUtil; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.ServiceReference; import org.osgi.util.tracker.ServiceTracker; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import de.nierbeck.camel.exam.demo.control.CamelMessageBean; import de.nierbeck.camel.exam.demo.control.JmsDestinations; import de.nierbeck.camel.exam.demo.control.RouteID; import de.nierbeck.camel.exam.demo.control.WebServiceOrder; import de.nierbeck.camel.exam.demo.control.internal.OrderWebServiceRoute; import de.nierbeck.camel.exam.demo.control.internal.OutMessageProcessor; import de.nierbeck.camel.exam.demo.control.internal.converter.MessageLogConverter; import de.nierbeck.camel.exam.demo.entities.CamelMessage; import de.nierbeck.camel.exam.demo.entities.dao.CamelMessageStoreDao; import de.nierbeck.camel.exam.demo.testutil.TestUtility; @RunWith(PaxExam.class) public class KarafRoutingTest extends CamelTestSupport { protected transient Logger log = LoggerFactory.getLogger(getClass()); ExecutorService executor = Executors.newCachedThreadPool(); static final Long COMMAND_TIMEOUT = 10000L; static final Long DEFAULT_TIMEOUT = 20000L; static final Long SERVICE_TIMEOUT = 30000L; @Inject protected FeaturesService featuresService; @Inject protected BundleContext bundleContext; @Inject protected DataSource dataSource; @Inject protected CamelMessageStoreDao orderMergingDao; @Inject private ConnectionFactory connectionFactory; private CamelContext controlContext; private CamelContext testContext; @Configuration public static Option[] configure() throws Exception { return new Option[] { karafDistributionConfiguration() .frameworkUrl( maven().groupId("org.apache.karaf").artifactId("apache-karaf").type("zip") .versionAsInProject()).useDeployFolder(false).karafVersion("3.0.0") .unpackDirectory(new File("target/paxexam/unpack/")), logLevel(LogLevel.INFO), features( maven().groupId("org.apache.karaf.features").artifactId("standard").type("xml") 
.classifier("features").versionAsInProject(), "http-whiteboard"), features( maven().groupId("org.apache.karaf.features").artifactId("enterprise").type("xml") .classifier("features").versionAsInProject(), "transaction", "jpa", "jndi"), features( maven().groupId("org.apache.activemq").artifactId("activemq-karaf").type("xml") .classifier("features").versionAsInProject(), "activemq-blueprint", "activemq-camel"), features( maven().groupId("org.apache.cxf.karaf").artifactId("apache-cxf").type("xml") .classifier("features").versionAsInProject(), "cxf-jaxws"), features( maven().groupId("org.apache.camel.karaf").artifactId("apache-camel").type("xml") .classifier("features").versionAsInProject(), "camel-blueprint", "camel-jms", "camel-jpa", "camel-mvel", "camel-jdbc", "camel-cxf", "camel-test"), KarafDistributionOption.editConfigurationFilePut("etc/org.ops4j.pax.url.mvn.cfg", "org.ops4j.pax.url.mvn.proxySupport", "true"), keepRuntimeFolder(), mavenBundle().groupId("com.h2database").artifactId("h2").version("1.3.167"), mavenBundle().groupId("de.nierbeck.camel.exam.demo").artifactId("entities").versionAsInProject(), mavenBundle().groupId("org.ops4j.pax.tipi").artifactId("org.ops4j.pax.tipi.hamcrest.core") .versionAsInProject(), streamBundle( bundle().add("OSGI-INF/blueprint/datasource.xml", new File("src/sample/resources/datasource.xml").toURL()) .set(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.datasource") .set(Constants.DYNAMICIMPORT_PACKAGE, "*").build()).start(), streamBundle( bundle().add("OSGI-INF/blueprint/mqbroker.xml", new File("src/sample/resources/mqbroker-test.xml").toURL()) .set(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.broker") .set(Constants.DYNAMICIMPORT_PACKAGE, "*").build()).start(), streamBundle( bundle().add(JmsDestinations.class) .add(WebServiceOrder.class) .add(CamelMessageBean.class) .add(RouteID.class) .add(OrderWebServiceRoute.class) .add(OutMessageProcessor.class) .add(MessageLogConverter.class) 
.add("OSGI-INF/blueprint/camel-main-context.xml", new File("src/main/resources/OSGI-INF/blueprint/camel-context.xml") .toURL()) .add("OSGI-INF/blueprint/jms-context.xml", new File("src/main/resources/OSGI-INF/blueprint/jms-config.xml").toURL()) .add("wsdl/WebServiceOrder.wsdl", new File("target/generated/wsdl/WebServiceOrder.wsdl").toURL()) .set(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.route-control") .set(Constants.DYNAMICIMPORT_PACKAGE, "*") .set(Constants.EXPORT_PACKAGE, "wsdl, de.nierbeck.camel.exam.demo.control").build()) .start() }; } /** * @param probe * @return */ @ProbeBuilder public TestProbeBuilder probeConfiguration(TestProbeBuilder probe) { // makes sure the generated Test-Bundle contains this import! probe.setHeader(Constants.BUNDLE_SYMBOLICNAME, "de.nierbeck.camel.exam.demo.route-control-test"); probe.setHeader(Constants.DYNAMICIMPORT_PACKAGE, "de.nierbeck.camel.exam.demo.control,*,org.apache.felix.service.*;status=provisional"); return probe; } @Override public boolean isCreateCamelContextPerClass() { // we override this method and return true, to tell Camel test-kit that // it should only create CamelContext once (per class), so we will // re-use the CamelContext between each test method in this class return true; } @Override protected void doPreSetup() throws Exception { controlContext = getOsgiService(CamelContext.class, "(camel.context.name=route-control)", 10000); assertNotNull(controlContext); testContext = getOsgiService(CamelContext.class, "(camel.context.name=route-test)", 10000); assertNotNull(testContext); for (CamelMessage orderMerging : orderMergingDao.findAll()) { orderMergingDao.makeTransient(orderMerging); } } @Test public void test() throws Exception { assertTrue(featuresService.isInstalled(featuresService.getFeature("camel-jpa"))); assertTrue(featuresService.isInstalled(featuresService.getFeature("camel-core"))); assertTrue(featuresService.isInstalled(featuresService.getFeature("camel-blueprint"))); 
assertTrue(featuresService.isInstalled(featuresService.getFeature("activemq-camel"))); assertTrue(featuresService.isInstalled(featuresService.getFeature("http-whiteboard"))); // Use these for debugging when test doesn't work right. // System.err.println(executeCommand("features:list")); // System.err.println(executeCommand("camel:route-list")); // System.err.println(executeCommand("list")); String command = executeCommand("camel:context-list"); System.err.println(command); assertTrue("Doesn't contain desired camel-contexts", command.contains("route-control")); assertTrue("Doesn't contain desired camel-contexts", command.contains("route-test")); } @Test public final void testSendMessage() throws Exception { final CamelMessageBean body = new CamelMessageBean(); body.setMessage("Testmessage"); body.setTmstamp(Long.toString(System.currentTimeMillis())); MockEndpoint mockEndpoint = (MockEndpoint) testContext.getEndpoint("mock:OrderRoute"); mockEndpoint.expectedMessageCount(1); ProducerTemplate template = testContext.createProducerTemplate(); template.start(); template.send("direct:start", new Processor() { public void process(Exchange exchange) { Message in = exchange.getIn(); in.setBody(body); } }); mockEndpoint.assertIsSatisfied(2500); } // Below are methods used for testing --> should be moved outside of // testclass /** * Executes a shell command and returns output as a String. Commands have a * default timeout of 10 seconds. * * @param command * @return */ protected String executeCommand(final String command) { return executeCommand(command, COMMAND_TIMEOUT, false); } /** * Executes a shell command and returns output as a String. Commands have a * default timeout of 10 seconds. * * @param command * The command to execute. * @param timeout * The amount of time in millis to wait for the command to * execute. * @param silent * Specifies if the command should be displayed in the screen. 
* @return */ protected String executeCommand(final String command, final Long timeout, final Boolean silent) { String response; final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); final PrintStream printStream = new PrintStream(byteArrayOutputStream); final CommandProcessor commandProcessor = getOsgiService(CommandProcessor.class); final CommandSession commandSession = commandProcessor.createSession(System.in, printStream, System.err); FutureTask<String> commandFuture = new FutureTask<String>(new Callable<String>() { public String call() { try { if (!silent) { System.err.println(command); } commandSession.execute(command); } catch (Exception e) { e.printStackTrace(System.err); } printStream.flush(); return byteArrayOutputStream.toString(); } }); try { executor.submit(commandFuture); response = commandFuture.get(timeout, TimeUnit.MILLISECONDS); } catch (Exception e) { e.printStackTrace(System.err); response = "SHELL COMMAND TIMED OUT: "; } return response; } /** * Executes multiple commands inside a Single Session. Commands have a * default timeout of 10 seconds. * * @param commands * @return */ protected String executeCommands(final String... 
commands) { String response; final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); final PrintStream printStream = new PrintStream(byteArrayOutputStream); final CommandProcessor commandProcessor = getOsgiService(CommandProcessor.class); final CommandSession commandSession = commandProcessor.createSession(System.in, printStream, System.err); FutureTask<String> commandFuture = new FutureTask<String>(new Callable<String>() { public String call() { try { for (String command : commands) { System.err.println(command); commandSession.execute(command); } } catch (Exception e) { e.printStackTrace(System.err); } return byteArrayOutputStream.toString(); } }); try { executor.submit(commandFuture); response = commandFuture.get(COMMAND_TIMEOUT, TimeUnit.MILLISECONDS); } catch (Exception e) { e.printStackTrace(System.err); response = "SHELL COMMAND TIMED OUT: "; } return response; } protected <T> T getOsgiService(Class<T> type, long timeout) { return getOsgiService(type, null, timeout); } protected <T> T getOsgiService(Class<T> type) { return getOsgiService(type, null, SERVICE_TIMEOUT); } protected <T> T getOsgiService(Class<T> type, String filter, long timeout) { ServiceTracker tracker = null; try { String flt; if (filter != null) { if (filter.startsWith("(")) { flt = "(&(" + Constants.OBJECTCLASS + "=" + type.getName() + ")" + filter + ")"; } else { flt = "(&(" + Constants.OBJECTCLASS + "=" + type.getName() + ")(" + filter + "))"; } } else { flt = "(" + Constants.OBJECTCLASS + "=" + type.getName() + ")"; } Filter osgiFilter = FrameworkUtil.createFilter(flt); tracker = new ServiceTracker(bundleContext, osgiFilter, null); tracker.open(true); // Note that the tracker is not closed to keep the reference // This is buggy, as the service reference may change i think Object svc = type.cast(tracker.waitForService(timeout)); if (svc == null) { Dictionary dic = bundleContext.getBundle().getHeaders(); System.err.println("Test bundle headers: " + 
TestUtility.explode(dic)); for (ServiceReference ref : TestUtility.asCollection(bundleContext.getAllServiceReferences(null, null))) { System.err.println("ServiceReference: " + ref); } for (ServiceReference ref : TestUtility.asCollection(bundleContext.getAllServiceReferences(null, flt))) { System.err.println("Filtered ServiceReference: " + ref); } throw new RuntimeException("Gave up waiting for service " + flt); } return type.cast(svc); } catch (InvalidSyntaxException e) { throw new IllegalArgumentException("Invalid filter", e); } catch (InterruptedException e) { throw new RuntimeException(e); } } }
reduced loglevel to warn
route-control/src/test/java/de/nierbeck/camel/exam/demo/control/route/KarafRoutingTest.java
reduced loglevel to warn
Java
apache-2.0
971f301f647b7167a0124e78e0ed213eef731579
0
vladmihalcea/high-performance-java-persistence,vladmihalcea/high-performance-java-persistence
package com.vladmihalcea.book.hpjp.hibernate.flushing; import org.hibernate.FlushMode; import org.hibernate.Session; import org.hibernate.query.Query; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * @author Vlad Mihalcea */ public class HibernateAutoFlushTest extends JPAAutoFlushTest { @Override protected boolean nativeHibernateSessionFactoryBootstrap() { return true; } @Test public void testFlushAutoNativeSQL() { doInJPA(entityManager -> { assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); }); } @Test public void testFlushAutoNativeSQLFlushModeAlways() { doInJPA(entityManager -> { assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); assertEquals( 1, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post" ) .unwrap(org.hibernate.query.Query.class) .setHibernateFlushMode(FlushMode.ALWAYS) .getSingleResult() ).intValue() ); }); } @Test public void testSessionModeAlways() { doInJPA(entityManager -> { assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); entityManager .unwrap(Session.class) .setHibernateFlushMode(FlushMode.ALWAYS); assertEquals( 1, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); }); } @Test public void testFlushAutoNativeSQLSynchronizedEntityClass() { doInJPA(entityManager -> { assertEquals( 0, ((Number) 
entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); int postCount = ((Number) entityManager .unwrap(Session.class) .createNativeQuery( "select count(*) " + "from post") .addSynchronizedEntityClass(Post.class) .getSingleResult()).intValue(); assertEquals(1, postCount); }); } @Test public void testFlushAutoNativeSQLSynchronizedQuerySpace() { doInJPA(entityManager -> { assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); int postCount = ((Number) entityManager .unwrap(Session.class) .createNativeQuery( "select count(*) " + "from post") .addSynchronizedQuerySpace("post") .getSingleResult()).intValue(); assertEquals(1, postCount); }); } }
core/src/test/java/com/vladmihalcea/book/hpjp/hibernate/flushing/HibernateAutoFlushTest.java
package com.vladmihalcea.book.hpjp.hibernate.flushing; import org.hibernate.FlushMode; import org.hibernate.Session; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * @author Vlad Mihalcea */ public class HibernateAutoFlushTest extends JPAAutoFlushTest { @Override protected boolean nativeHibernateSessionFactoryBootstrap() { return true; } @Test @Ignore public void testFlushAutoNativeSQL() { doInJPA(entityManager -> { assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); int postCount = ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult()).intValue(); assertEquals(1, postCount); }); } @Test public void testFlushAutoNativeSQLFlushModeAlways() { doInJPA(entityManager -> { assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); int postCount = ((Number) entityManager .unwrap(Session.class) .createNativeQuery( "select count(*) " + "from post") .setFlushMode(FlushMode.ALWAYS) .getSingleResult()).intValue(); assertEquals(1, postCount); }); } @Test public void testFlushAutoNativeSQLSynchronizedEntityClass() { doInJPA(entityManager -> { assertEquals( 0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); int postCount = ((Number) entityManager .unwrap(Session.class) .createNativeQuery( "select count(*) " + "from post") .addSynchronizedEntityClass(Post.class) .getSingleResult()).intValue(); assertEquals(1, postCount); }); } @Test public void testFlushAutoNativeSQLSynchronizedQuerySpace() { doInJPA(entityManager -> { assertEquals( 
0, ((Number) entityManager .createNativeQuery( "select count(*) " + "from post") .getSingleResult() ).intValue() ); entityManager.persist( new Post() .setTitle("High-Performance Java Persistence") ); int postCount = ((Number) entityManager .unwrap(Session.class) .createNativeQuery( "select count(*) " + "from post") .addSynchronizedQuerySpace("post") .getSingleResult()).intValue(); assertEquals(1, postCount); }); } }
Add more Hibernate ALWAYS flush mode examples
core/src/test/java/com/vladmihalcea/book/hpjp/hibernate/flushing/HibernateAutoFlushTest.java
Add more Hibernate ALWAYS flush mode examples
Java
apache-2.0
f7cae576680c3456867f1685c433091df00fc752
0
kuali/rice-playground,ricepanda/rice-git2,ricepanda/rice-git2,ricepanda/rice-git3,ricepanda/rice-git3,kuali/rice-playground,ricepanda/rice-git3,kuali/rice-playground,ricepanda/rice-git3,kuali/rice-playground,ricepanda/rice-git2,ricepanda/rice-git2
/** * Copyright 2005-2013 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.sampleu.travel.dataobject; import edu.sampleu.travel.options.PostalCountryCode; import edu.sampleu.travel.options.PostalStateCode; import edu.sampleu.travel.options.TripType; import org.junit.Test; import org.kuali.rice.krad.service.KRADServiceLocator; import org.kuali.rice.krad.test.KRADTestCase; import org.kuali.rice.test.BaselineTestCase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * Tests basic {@code TravelDestination} persistence. * * @author Kuali Rice Team ([email protected]) */ @BaselineTestCase.BaselineMode(BaselineTestCase.Mode.CLEAR_DB) public class TravelDestinationTest extends KRADTestCase { private static final String TYPE_CODE = TripType.OS.getCode(); private static final String DESTINATION_NAME = PostalStateCode.PR.getLabel(); private static final String COUNTRY_CODE = PostalCountryCode.US.getCode(); private static final String STATE_CODE = PostalStateCode.PR.getCode(); /** * Tests basic {@code TravelDestination} persistence by saving it, reloading it, and checking the data. 
*/ @Test public void testTravelDestination() { assertTrue(TravelDestination.class.getName() + " is not mapped in JPA", KRADServiceLocator.getDataObjectService().supports(TravelDestination.class)); String id = createTravelDestination(); TravelDestination travelDestination = KRADServiceLocator.getDataObjectService().find(TravelDestination.class, id); assertNotNull("Travel Destination ID is null", travelDestination.getTravelDestinationId()); assertEquals("Travel Destination name is incorrect", DESTINATION_NAME, travelDestination.getTravelDestinationName()); assertEquals("Travel Destination country is incorrect", COUNTRY_CODE, travelDestination.getCountryCd()); assertEquals("Travel Destination state is incorrect", STATE_CODE, travelDestination.getStateCd()); assertTrue("Travel Destination is not active", travelDestination.isActive()); } private String createTravelDestination() { TravelDestination travelDestination = new TravelDestination(); travelDestination.setTravelDestinationName(DESTINATION_NAME); travelDestination.setCountryCd(COUNTRY_CODE); travelDestination.setStateCd(STATE_CODE); travelDestination.setActive(true); return KRADServiceLocator.getDataObjectService().save(travelDestination).getTravelDestinationId(); } }
rice-framework/krad-it/src/test/java/edu.sampleu.travel.dataobject/TravelDestinationTest.java
/** * Copyright 2005-2013 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.sampleu.travel.dataobject; import edu.sampleu.travel.options.PostalCountryCode; import edu.sampleu.travel.options.PostalStateCode; import edu.sampleu.travel.options.TripType; import org.junit.Test; import org.kuali.rice.krad.service.KRADServiceLocator; import org.kuali.rice.krad.test.KRADTestCase; import org.kuali.rice.test.BaselineTestCase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * Tests basic {@code TravelDestination} persistence. * * @author Kuali Rice Team ([email protected]) */ @BaselineTestCase.BaselineMode(BaselineTestCase.Mode.CLEAR_DB) public class TravelDestinationTest extends KRADTestCase { private static final String TYPE_CODE = TripType.OS.getCode(); private static final String DESTINATION_NAME = PostalStateCode.PR.getLabel(); private static final String COUNTRY_CODE = PostalCountryCode.US.getCode(); private static final String STATE_CODE = PostalStateCode.PR.getCode(); /** * Tests basic {@code TravelDestination} persistence by saving it, reloading it, and checking the data. 
*/ @Test public void testTravelDestination() { assertTrue(TravelDestination.class.getName() + " is not mapped in JPA", KRADServiceLocator.getDataObjectService().supports(TravelDestination.class)); String id = createTravelDestination(); TravelDestination travelDestination = KRADServiceLocator.getDataObjectService().find(TravelDestination.class, id); assertNotNull("Travel Destination ID is null", travelDestination.getTravelDestinationId()); //assertEquals("Travel Destination type is incorrect", TYPE_CODE, travelDestination.getTravelTypeCode()); assertEquals("Travel Destination name is incorrect", DESTINATION_NAME, travelDestination.getTravelDestinationName()); assertEquals("Travel Destination country is incorrect", COUNTRY_CODE, travelDestination.getCountryCd()); assertEquals("Travel Destination state is incorrect", STATE_CODE, travelDestination.getStateCd()); assertTrue("Travel Destination is not active", travelDestination.isActive()); } private String createTravelDestination() { TravelDestination travelDestination = new TravelDestination(); //travelDestination.setTravelTypeCode(TYPE_CODE); travelDestination.setTravelDestinationName(DESTINATION_NAME); travelDestination.setCountryCd(COUNTRY_CODE); travelDestination.setStateCd(STATE_CODE); travelDestination.setActive(true); return KRADServiceLocator.getDataObjectService().save(travelDestination).getTravelDestinationId(); } }
[KULRICE-11006] Advanced Lookup Demo - trans doc form & view. Updated to reflect removal of docTypeCode from primary destination. git-svn-id: 2a5d2b5a02908a0c4ba7967b726d8c4198d1b9ed@42474 7a7aa7f6-c479-11dc-97e2-85a2497f191d
rice-framework/krad-it/src/test/java/edu.sampleu.travel.dataobject/TravelDestinationTest.java
[KULRICE-11006] Advanced Lookup Demo - trans doc form & view. Updated to reflect removal of docTypeCode from primary destination.
Java
apache-2.0
70b2bb5b0db30e8bc6a2bcf5be0ad4c9b25ac257
0
webdsl/webdsl,webdsl/webdsl,webdsl/webdsl,webdsl/webdsl
package org.webdsl.tools.strategoxt; import java.io.IOException; import java.io.InputStream; import org.spoofax.interpreter.adapter.aterm.WrappedATermFactory; import org.spoofax.interpreter.core.InterpreterException; import org.spoofax.jsglr.InvalidParseTableException; import org.spoofax.jsglr.ParseTable; import org.spoofax.jsglr.ParseTableManager; import org.spoofax.jsglr.SGLR; import org.strategoxt.HybridInterpreter; import org.strategoxt.lang.terms.TermFactory; /** * Environment class that maintains shared objects. * * @author Lennart Kats <L.C.L.Kats add tudelft.nl> */ public final class Environment { private final static TermFactory factory = new TermFactory(); private final static WrappedATermFactory wrappedFactory = new WrappedATermFactory(); private final static ParseTableManager parseTableManager = new ParseTableManager(wrappedFactory.getFactory()); public static TermFactory getTermFactory() { return factory; } public static WrappedATermFactory getWrappedTermFactory() { return wrappedFactory; } public static SGLR createSGLR(ParseTable parseTable) { return new SGLR(wrappedFactory.getFactory(), parseTable); } public static HybridInterpreter createInterpreter() throws IOException, InterpreterException { HybridInterpreter result = new HybridInterpreter(factory); return result; } public static ParseTable loadParseTable(InputStream parseTable) throws IOException, InvalidParseTableException { SGLR.setWorkAroundMultipleLookahead(true); return parseTableManager.loadFromStream(parseTable); } }
java-runtime/src/org/webdsl/tools/strategoxt/Environment.java
package org.webdsl.tools.strategoxt; import java.io.IOException; import java.io.InputStream; import org.spoofax.interpreter.adapter.aterm.WrappedATermFactory; import org.spoofax.interpreter.core.InterpreterException; import org.spoofax.jsglr.InvalidParseTableException; import org.spoofax.jsglr.ParseTable; import org.spoofax.jsglr.ParseTableManager; import org.spoofax.jsglr.SGLR; import org.strategoxt.HybridInterpreter; import org.strategoxt.lang.terms.TermFactory; /** * Environment class that maintains shared objects. * * @author Lennart Kats <L.C.L.Kats add tudelft.nl> */ final class Environment { private final static TermFactory factory = new TermFactory(); private final static WrappedATermFactory wrappedFactory = new WrappedATermFactory(); private final static ParseTableManager parseTableManager = new ParseTableManager(wrappedFactory.getFactory()); public static TermFactory getTermFactory() { return factory; } public static WrappedATermFactory getWrappedTermFactory() { return wrappedFactory; } public static SGLR createSGLR(ParseTable parseTable) { return new SGLR(wrappedFactory.getFactory(), parseTable); } public static HybridInterpreter createInterpreter() throws IOException, InterpreterException { HybridInterpreter result = new HybridInterpreter(factory); return result; } public static ParseTable loadParseTable(InputStream parseTable) throws IOException, InvalidParseTableException { SGLR.setWorkAroundMultipleLookahead(true); return parseTableManager.loadFromStream(parseTable); } }
Made the Environment class public for use by the backend svn path=/webdsls/trunk/; revision=4105
java-runtime/src/org/webdsl/tools/strategoxt/Environment.java
Made the Environment class public for use by the backend
Java
apache-2.0
9bb6f56902a7b8661207014018b611856bb9776d
0
apache/jackrabbit-oak,mreutegg/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.segment.file; import static com.google.common.base.Preconditions.checkNotNull; import static java.lang.String.format; import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount; import javax.annotation.Nonnull; import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions; public class SizeDeltaGcEstimation implements GCEstimation { private final long delta; private final GCJournal gcJournal; private final long totalSize; private boolean gcNeeded; private String gcInfo = "unknown"; private boolean finished = false; public SizeDeltaGcEstimation(@Nonnull SegmentGCOptions opts, @Nonnull GCJournal gcJournal, long totalSize) { this.delta = checkNotNull(opts).getGcSizeDeltaEstimation(); this.gcJournal = checkNotNull(gcJournal); this.totalSize = totalSize; } @Override public boolean gcNeeded() { if (!finished) { run(); } return gcNeeded; } @Override public String gcLog() { if (!finished) { run(); } return gcInfo; } private void run() { if (finished) { return; } if (delta == 0) { gcNeeded = true; gcInfo = format( "Estimation skipped because the size delta value equals 0", delta); } else if (getPreviousCleanupSize() < 0) { gcNeeded = true; 
gcInfo = "Estimation skipped because of missing gc journal data (expected on first run)"; } else { long lastGc = getPreviousCleanupSize(); long gain = totalSize - lastGc; long gainP = 100 * (totalSize - lastGc) / totalSize; gcNeeded = gain > delta; gcInfo = format( "Segmentstore size has increased since the last compaction from %s (%s bytes) to %s (%s bytes), " + "an increase of %s (%s bytes) or %s%%. ", humanReadableByteCount(lastGc), lastGc, humanReadableByteCount(totalSize), totalSize, humanReadableByteCount(gain), gain, gainP); if (gcNeeded) { gcInfo = gcInfo + format( "This is greater than sizeDeltaEstimation=%s (%s bytes), so running compaction", humanReadableByteCount(delta), delta); } else { gcInfo = gcInfo + format( "This is less than sizeDeltaEstimation=%s (%s bytes), so skipping compaction", humanReadableByteCount(delta), delta); } } finished = true; } private long getPreviousCleanupSize() { return gcJournal.read().getRepoSize(); } }
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/SizeDeltaGcEstimation.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.segment.file; import static com.google.common.base.Preconditions.checkNotNull; import static java.lang.String.format; import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount; import javax.annotation.Nonnull; import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions; public class SizeDeltaGcEstimation implements GCEstimation { private final long delta; private final GCJournal gcJournal; private final long totalSize; private boolean gcNeeded; private String gcInfo = "unknown"; private boolean finished = false; public SizeDeltaGcEstimation(@Nonnull SegmentGCOptions opts, @Nonnull GCJournal gcJournal, long totalSize) { this.delta = checkNotNull(opts).getGcSizeDeltaEstimation(); this.gcJournal = checkNotNull(gcJournal); this.totalSize = totalSize; } @Override public boolean gcNeeded() { if (!finished) { run(); } return gcNeeded; } @Override public String gcLog() { if (!finished) { run(); } return gcInfo; } private void run() { if (finished) { return; } if (delta == 0) { gcNeeded = true; gcInfo = format( "Estimation skipped because the size delta value equals 0", delta); } else if (getPreviousCleanupSize() < 0) { gcNeeded = true; 
gcInfo = format("Estimation skipped because of missing gc journal data"); } else { long lastGc = getPreviousCleanupSize(); long gain = totalSize - lastGc; long gainP = 100 * (totalSize - lastGc) / totalSize; gcNeeded = gain > delta; if (gcNeeded) { gcInfo = format( "Size delta is %s%% or %s/%s (%s/%s bytes), so running compaction", gainP, humanReadableByteCount(lastGc), humanReadableByteCount(totalSize), lastGc, totalSize); } else { gcInfo = format( "Size delta is %s%% or %s/%s (%s/%s bytes), so skipping compaction for now", gainP, humanReadableByteCount(lastGc), humanReadableByteCount(totalSize), lastGc, totalSize); } } finished = true; } private long getPreviousCleanupSize() { return gcJournal.read().getRepoSize(); } }
OAK-4901 Improve SizeDeltaGcEstimation logging - patch by Valentin Olteanu git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1764039 13f79535-47bb-0310-9956-ffa450edef68
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/SizeDeltaGcEstimation.java
OAK-4901 Improve SizeDeltaGcEstimation logging - patch by Valentin Olteanu
Java
apache-2.0
e02261c763574ff9352dfe6aa58d051d1f569fb2
0
mpi2/PhenotypeArchive,mpi2/PhenotypeArchive,mpi2/PhenotypeArchive
package uk.ac.ebi.phenotype.solr.indexer; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import uk.ac.ebi.phenotype.pojo.BiologicalSampleType; import uk.ac.ebi.phenotype.pojo.SexType; import uk.ac.ebi.phenotype.pojo.ZygosityType; import uk.ac.ebi.phenotype.service.dto.ObservationDTO; import uk.ac.ebi.phenotype.solr.indexer.beans.ImpressBean; import uk.ac.ebi.phenotype.solr.indexer.utils.IndexerMap; import javax.sql.DataSource; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.*; /** * Populate the experiment core */ public class ObservationIndexer extends AbstractIndexer { private static final Logger logger = LoggerFactory.getLogger(ObservationIndexer.class); private static Connection connection; @Autowired @Qualifier("komp2DataSource") DataSource komp2DataSource; @Autowired @Qualifier("observationIndexing") SolrServer observationSolrServer; Map<String, BiologicalDataBean> biologicalData = new HashMap<>(); Map<String, BiologicalDataBean> lineBiologicalData = new HashMap<>(); Map<Integer, ImpressBean> pipelineMap = new HashMap<>(); Map<Integer, ImpressBean> procedureMap = new HashMap<>(); Map<Integer, ImpressBean> parameterMap = new HashMap<>(); Map<Integer, DatasourceBean> datasourceMap = new HashMap<>(); Map<Integer, DatasourceBean> projectMap = new HashMap<>(); Map<Integer, List<ParameterAssociationBean>> parameterAssociationMap = new HashMap<>(); Map<Integer, List<WeightBean>> weightMap = new HashMap<>(); Map<Integer, WeightBean> ipgttWeightMap = new HashMap<>(); Map<String, Map<String, String>> translateCategoryNames = new HashMap<>(); public final String 
ipgttWeightParamter = "IMPC_IPG_001_001"; public final List<String> weightParamters = Arrays.asList( "'IMPC_GRS_003_001'", "'IMPC_CAL_001_001'", "'IMPC_DXA_001_001'", "'IMPC_HWT_007_001'", "'IMPC_PAT_049_001'", "'IMPC_BWT_001_001'", "'IMPC_ABR_001_001'", "'IMPC_CHL_001_001'", "'TCP_CHL_001_001'", "'HMGU_ROT_004_001'", "'ESLIM_001_001_001'", "'ESLIM_002_001_001'", "'ESLIM_003_001_001'", "'ESLIM_004_001_001'", "'ESLIM_005_001_001'", "'ESLIM_020_001_001'", "'ESLIM_022_001_001'", "'ESLIM_009_001_003'", "'ESLIM_010_001_003'", "'ESLIM_011_001_011'", "'ESLIM_012_001_005'", "'ESLIM_013_001_018'", "'ESLIM_022_001_001'", "'GMC_916_001_022'", "'GMC_908_001_001'", "'GMC_900_001_001'", "'GMC_926_001_003'", "'GMC_922_001_002'", "'GMC_923_001_001'", "'GMC_921_001_002'", "'GMC_902_001_003'", "'GMC_912_001_018'", "'GMC_917_001_001'", "'GMC_920_001_001'", "'GMC_909_001_002'", "'GMC_914_001_001'" ); public final List<String> maleFertilityParameters = Arrays.asList( "IMPC_FER_001_001", "IMPC_FER_006_001", "IMPC_FER_007_001", "IMPC_FER_008_001", "IMPC_FER_009_001"); public final List<String> femaleFertilityParameters = Arrays.asList( "IMPC_FER_019_001", "IMPC_FER_010_001", "IMPC_FER_011_001", "IMPC_FER_012_001", "IMPC_FER_013_001"); public ObservationIndexer() { } @Override public void validateBuild() throws IndexerException { Long numFound = getDocumentCount(observationSolrServer); if (numFound <= MINIMUM_DOCUMENT_COUNT) throw new IndexerException(new ValidationException("Actual observation document count is " + numFound + ".")); if (numFound != documentCount) logger.warn("WARNING: Added " + documentCount + " observation documents but SOLR reports " + numFound + " documents."); else logger.info("validateBuild(): Indexed " + documentCount + " observation documents."); } public static void main(String[] args) throws IndexerException { ObservationIndexer main = new ObservationIndexer(); main.initialise(args); main.run(); main.validateBuild(); logger.info("Process finished. 
Exiting."); } @Override protected Logger getLogger() { return logger; } @Override public void initialise(String[] args) throws IndexerException { super.initialise(args); try { connection = komp2DataSource.getConnection(); logger.info("Populating impress maps"); pipelineMap = IndexerMap.getImpressPipelines(connection); procedureMap = IndexerMap.getImpressProcedures(connection); parameterMap = IndexerMap.getImpressParameters(connection); } catch (SQLException e) { throw new IndexerException(e); } printConfiguration(); } @Override public void run() throws IndexerException { Long start = System.currentTimeMillis(); try { logger.info("Populating data source, project, and category translation maps"); populateDatasourceDataMap(); populateCategoryNamesDataMap(); logger.info("Populating biological data maps"); populateBiologicalDataMap(); populateLineBiologicalDataMap(); populateParameterAssociationMap(); logger.info("Populating weight maps"); populateWeightMap(); populateIpgttWeightMap(); logger.info("Populating experiment solr core"); populateObservationSolrCore(); } catch (SolrServerException | SQLException | IOException e) { throw new IndexerException(e); } logger.info("Populating experiment solr core - done [took: {}s]", (System.currentTimeMillis() - start) / 1000.0); } public void populateObservationSolrCore() throws SQLException, IOException, SolrServerException { int count = 0; observationSolrServer.deleteByQuery("*:*"); String query = "SELECT o.id as id, o.db_id as datasource_id, o.parameter_id as parameter_id, o.parameter_stable_id, " + "o.observation_type, o.missing, o.parameter_status, o.parameter_status_message, " + "o.biological_sample_id, " + "e.project_id as project_id, e.pipeline_id as pipeline_id, e.procedure_id as procedure_id, " + "e.date_of_experiment, e.external_id, e.id as experiment_id, " + "e.metadata_combined as metadata_combined, e.metadata_group as metadata_group, " + "co.category as raw_category, " + "uo.data_point as unidimensional_data_point, 
" + "mo.data_point as multidimensional_data_point, " + "tso.data_point as time_series_data_point, " + "mo.order_index, " + "mo.dimension, " + "tso.time_point, " + "tso.discrete_point, " + "iro.file_type, " + "iro.download_file_path " + "FROM observation o " + "LEFT OUTER JOIN categorical_observation co ON o.id=co.id " + "LEFT OUTER JOIN unidimensional_observation uo ON o.id=uo.id " + "LEFT OUTER JOIN multidimensional_observation mo ON o.id=mo.id " + "LEFT OUTER JOIN time_series_observation tso ON o.id=tso.id " + "LEFT OUTER JOIN image_record_observation iro ON o.id=iro.id " + "INNER JOIN experiment_observation eo ON eo.observation_id=o.id " + "INNER JOIN experiment e on eo.experiment_id=e.id " + "WHERE o.missing=0"; try (PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { ObservationDTO o = new ObservationDTO(); o.setId(r.getInt("id")); o.setParameterId(r.getInt("parameter_id")); o.setExperimentId(r.getInt("experiment_id")); o.setDateOfExperiment(r.getDate("date_of_experiment")); o.setExperimentSourceId(r.getString("external_id")); o.setParameterId(parameterMap.get(r.getInt("parameter_id")).id); o.setParameterName(parameterMap.get(r.getInt("parameter_id")).name); o.setParameterStableId(parameterMap.get(r.getInt("parameter_id")).stableId); o.setProcedureId(procedureMap.get(r.getInt("procedure_id")).id); o.setProcedureName(procedureMap.get(r.getInt("procedure_id")).name); String procedureStableId = procedureMap.get(r.getInt("procedure_id")).stableId; o.setProcedureStableId(procedureStableId); o.setProcedureGroup(procedureStableId.substring(0, procedureStableId.lastIndexOf("_"))); o.setPipelineId(pipelineMap.get(r.getInt("pipeline_id")).id); o.setPipelineName(pipelineMap.get(r.getInt("pipeline_id")).name); o.setPipelineStableId(pipelineMap.get(r.getInt("pipeline_id")).stableId); 
o.setDataSourceId(datasourceMap.get(r.getInt("datasource_id")).id); o.setDataSourceName(datasourceMap.get(r.getInt("datasource_id")).name); o.setProjectId(projectMap.get(r.getInt("project_id")).id); o.setProjectName(projectMap.get(r.getInt("project_id")).name); o.setMetadataGroup(r.getString("metadata_group")); if (r.wasNull()) { o.setMetadataGroup(""); o.setMetadata(new ArrayList<String>()); } String metadataCombined = r.getString("metadata_combined"); if ( ! r.wasNull()) { o.setMetadata(new ArrayList<>(Arrays.asList(metadataCombined.split("::")))); } // Add the Biological data String bioSampleId = r.getString("biological_sample_id"); if (r.wasNull()) { // Line level data BiologicalDataBean b = lineBiologicalData.get(r.getString("experiment_id")); if (b == null) { logger.error("Cannot find biological data for experiment {}", r.getString("experiment_id")); continue; } o.setBiologicalModelId(b.biologicalModelId); o.setGeneAccession(b.geneAcc); o.setGeneSymbol(b.geneSymbol); o.setAlleleAccession(b.alleleAccession); o.setAlleleSymbol(b.alleleSymbol); o.setStrainAccessionId(b.strainAcc); o.setStrainName(b.strainName); o.setPhenotypingCenter(b.phenotypingCenterName); o.setPhenotypingCenterId(b.phenotypingCenterId); o.setColonyId(b.colonyId); // Viability applies to both sexes if (o.getParameterStableId().contains("_VIA_")) { o.setSex(SexType.both.getName()); } else { // Fertility applies to the sex tested, separate parameters per male//female if (maleFertilityParameters.contains(o.getParameterStableId())) { o.setSex(SexType.male.getName()); } else if (femaleFertilityParameters.contains(o.getParameterStableId())) { o.setSex(SexType.female.getName()); } if(o.getSex() == null) { o.setSex(SexType.both.getName()); } } if (b.zygosity != null) { o.setZygosity(b.zygosity); } else { // Default to hom o.setZygosity(ZygosityType.homozygote.getName()); } // All line level parameters are sample group "experimental" due to the nature of the // procedures (i.e. 
no control mice will go through VIA or FER procedures.) o.setGroup(BiologicalSampleType.experimental.getName()); } else { // Specimen level data BiologicalDataBean b = biologicalData.get(bioSampleId); o.setBiologicalModelId(b.biologicalModelId); o.setGeneAccession(b.geneAcc); o.setGeneSymbol(b.geneSymbol); o.setAlleleAccession(b.alleleAccession); o.setAlleleSymbol(b.alleleSymbol); o.setStrainAccessionId(b.strainAcc); o.setStrainName(b.strainName); o.setPhenotypingCenter(b.phenotypingCenterName); o.setPhenotypingCenterId(b.phenotypingCenterId); o.setColonyId(b.colonyId); o.setZygosity(b.zygosity); o.setDateOfBirth(b.dateOfBirth); o.setSex(b.sex); o.setGroup(b.sampleGroup); o.setBiologicalSampleId(b.biologicalSampleId); o.setExternalSampleId(b.externalSampleId); } o.setObservationType(r.getString("observation_type")); String cat = r.getString("raw_category"); if ( ! r.wasNull()) { String param = r.getString("parameter_stable_id"); if (translateCategoryNames.containsKey(param)) { String transCat = translateCategoryNames.get(param).get(cat); if (transCat != null && ! transCat.equals("")) { o.setCategory(transCat); } else { o.setCategory(cat); } } else { o.setCategory(cat); } } // Add the correct "data point" for the type switch (r.getString("observation_type")) { case "unidimensional": o.setDataPoint(r.getFloat("unidimensional_data_point")); break; case "multidimensional": o.setDataPoint(r.getFloat("multidimensional_data_point")); break; case "time_series": o.setDataPoint(r.getFloat("time_series_data_point")); break; } Integer order_index = r.getInt("order_index"); if ( ! r.wasNull()) { o.setOrderIndex(order_index); } String dimension = r.getString("dimension"); if ( ! r.wasNull()) { o.setDimension(dimension); } String time_point = r.getString("time_point"); if ( ! r.wasNull()) { o.setTimePoint(time_point); } Float discrete_point = r.getFloat("discrete_point"); if ( ! 
r.wasNull()) { o.setDiscretePoint(discrete_point); } String file_type = r.getString("file_type"); if ( ! r.wasNull()) { o.setFileType(file_type); } String download_file_path = r.getString("download_file_path"); if ( ! r.wasNull()) { o.setDownloadFilePath(download_file_path); } if (parameterAssociationMap.containsKey(r.getInt("id"))) { for (ParameterAssociationBean pb : parameterAssociationMap.get(r.getInt("id"))) { // Will never be null, we hope o.addParameterAssociationStableId(pb.parameterStableId); o.addParameterAssociationName(pb.parameterAssociationName); if (StringUtils.isNotEmpty(pb.parameterAssociationValue)) { o.addParameterAssociationValue(pb.parameterAssociationValue); } if (StringUtils.isNotEmpty(pb.sequenceId)) { o.addParameterAssociationSequenceId(pb.sequenceId); } if (StringUtils.isNotEmpty(pb.dimId)) { o.addParameterAssociationDimId(pb.dimId); } } } // Add weight parameters only if this observation isn't for a weight parameter if ( ! weightParamters.contains(o.getParameterStableId()) && ! 
ipgttWeightParamter.equals(o.getParameterStableId())) { WeightBean b = getNearestWeight(o.getBiologicalSampleId(), o.getDateOfExperiment()); if (o.getParameterStableId().equals(ipgttWeightParamter)) { b = getNearestIpgttWeight(o.getBiologicalSampleId()); } if (b != null) { o.setWeight(b.weight); o.setWeightDate(b.date); o.setWeightDaysOld(b.daysOld); o.setWeightParameterStableId(b.parameterStableId); } } // 60 seconds between commits documentCount++; observationSolrServer.addBean(o, 60000); count ++; if (count % 100000 == 0) { logger.info(" added " + count + " beans"); } } // Final commit to save the rest of the docs observationSolrServer.commit(); } catch (Exception e) { logger.error("Big error {}", e.getMessage(), e); } } /** * Add all the relevant data required quickly looking up biological data * associated to a biological sample * * @throws SQLException when a database exception occurs */ public void populateBiologicalDataMap() throws SQLException { String query = "SELECT CAST(bs.id AS CHAR) as biological_sample_id, bs.organisation_id as phenotyping_center_id, " + "org.name as phenotyping_center_name, bs.sample_group, bs.external_id as external_sample_id, " + "ls.date_of_birth, ls.colony_id, ls.sex as sex, ls.zygosity, " + "bms.biological_model_id, " + "strain.acc as strain_acc, strain.name as strain_name, " + "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=bms.biological_model_id) as allele_accession, " + "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bms.biological_model_id) as allele_symbol, " + "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bms.biological_model_id) as acc, " + "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE 
bmgf.biological_model_id=bms.biological_model_id) as symbol " + "FROM biological_sample bs " + "INNER JOIN organisation org ON bs.organisation_id=org.id " + "INNER JOIN live_sample ls ON bs.id=ls.id " + "INNER JOIN biological_model_sample bms ON bs.id=bms.biological_sample_id " + "INNER JOIN biological_model_strain bmstrain ON bmstrain.biological_model_id=bms.biological_model_id " + "INNER JOIN strain strain ON strain.acc=bmstrain.strain_acc"; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { BiologicalDataBean b = new BiologicalDataBean(); b.alleleAccession = resultSet.getString("allele_accession"); b.alleleSymbol = resultSet.getString("allele_symbol"); b.biologicalModelId = resultSet.getInt("biological_model_id"); b.biologicalSampleId = resultSet.getInt("biological_sample_id"); b.colonyId = resultSet.getString("colony_id"); b.dateOfBirth = resultSet.getDate("date_of_birth"); b.externalSampleId = resultSet.getString("external_sample_id"); b.geneAcc = resultSet.getString("acc"); b.geneSymbol = resultSet.getString("symbol"); b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id"); b.phenotypingCenterName = resultSet.getString("phenotyping_center_name"); b.sampleGroup = resultSet.getString("sample_group"); b.sex = resultSet.getString("sex"); b.strainAcc = resultSet.getString("strain_acc"); b.strainName = resultSet.getString("strain_name"); b.zygosity = resultSet.getString("zygosity"); biologicalData.put(resultSet.getString("biological_sample_id"), b); } } } /** * Add all the relevant data required quickly looking up biological data * associated to a biological model (really an experiment) * * @throws SQLException when a database exception occurs */ public void populateLineBiologicalDataMap() throws SQLException { String query = "SELECT e.id as experiment_id, e.colony_id, e.biological_model_id, " + "e.organisation_id as phenotyping_center_id, org.name as 
phenotyping_center_name, " + "strain.acc as strain_acc, strain.name as strain_name, " + "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=e.biological_model_id) as allele_accession, " + "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=e.biological_model_id) as allele_symbol, " + "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=e.biological_model_id) as acc, " + "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=e.biological_model_id) as symbol " + "FROM experiment e " + "INNER JOIN organisation org ON e.organisation_id=org.id " + "INNER JOIN biological_model_strain bm_strain ON bm_strain.biological_model_id=e.biological_model_id " + "INNER JOIN strain strain ON strain.acc=bm_strain.strain_acc"; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { BiologicalDataBean b = new BiologicalDataBean(); b.colonyId = resultSet.getString("colony_id"); b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id"); b.phenotypingCenterName = resultSet.getString("phenotyping_center_name"); b.strainAcc = resultSet.getString("strain_acc"); b.strainName = resultSet.getString("strain_name"); b.alleleAccession = resultSet.getString("allele_accession"); b.alleleSymbol = resultSet.getString("allele_symbol"); b.biologicalModelId = resultSet.getInt("biological_model_id"); b.geneAcc = resultSet.getString("acc"); b.geneSymbol = resultSet.getString("symbol"); if (b.alleleAccession == null && b.colonyId != null) { // Override the biological model with one that has the // correct gene/allele/strain String query2 = "SELECT DISTINCT bm.id as biological_model_id, " + " (select distinct allele_acc from 
biological_model_allele bma WHERE bma.biological_model_id=bm.id) as allele_accession, " + " (select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) as allele_symbol, " + " (select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bm.id) as acc, " + " (select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id) as symbol " + " FROM live_sample ls " + " INNER JOIN biological_model_sample bms ON bms.biological_sample_id=ls.id " + " INNER JOIN biological_model bm ON bm.id=bms.biological_model_id " + " INNER JOIN biological_model_strain bm_strain ON bm_strain.biological_model_id=bm.id " + " WHERE bm.allelic_composition !='' AND ls.colony_id = ? LIMIT 1 " ; try (PreparedStatement p2 = connection.prepareStatement(query2)) { p2.setString(1, resultSet.getString("colony_id")); ResultSet resultSet2 = p2.executeQuery(); while (resultSet2.next()) { b.strainAcc = resultSet2.getString("strain_acc"); b.strainName = resultSet2.getString("strain_name"); b.alleleAccession = resultSet2.getString("allele_accession"); b.alleleSymbol = resultSet2.getString("allele_symbol"); b.biologicalModelId = resultSet2.getInt("biological_model_id"); b.geneAcc = resultSet2.getString("acc"); b.geneSymbol = resultSet2.getString("symbol"); break; } } } lineBiologicalData.put(resultSet.getString("experiment_id"), b); } } } /** * Add all the relevant data required for translating the category names in * the cases where the category names are numerals, but the actual name is * in the description field * * @throws SQLException when a database exception occurs */ public void populateCategoryNamesDataMap() throws SQLException { String query = "SELECT pp.stable_id, ppo.name, ppo.description FROM phenotype_parameter pp \n" + "INNER JOIN 
phenotype_parameter_lnk_option pplo ON pp.id=pplo.parameter_id\n" + "INNER JOIN phenotype_parameter_option ppo ON ppo.id=pplo.option_id \n" + "WHERE ppo.name NOT REGEXP '^[a-zA-Z]' AND ppo.description!=''"; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { String stableId = resultSet.getString("stable_id"); if ( ! translateCategoryNames.containsKey(stableId)) { translateCategoryNames.put(stableId, new HashMap<String, String>()); } translateCategoryNames.get(stableId).put(resultSet.getString("name"), resultSet.getString("description")); } } } public void populateParameterAssociationMap() throws SQLException { Map<String, String> stableIdToNameMap = this.getAllParameters(); String query = "SELECT id, observation_id, parameter_id, sequence_id, dim_id, parameter_association_value FROM parameter_association"; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { Integer obsId = resultSet.getInt("observation_id"); ParameterAssociationBean pb = new ParameterAssociationBean(); pb.observationId = obsId; pb.parameterStableId = resultSet.getString("parameter_id"); pb.parameterAssociationValue = resultSet.getString("parameter_association_value"); if (stableIdToNameMap.get(pb.parameterStableId) != null) { pb.parameterAssociationName = stableIdToNameMap.get(pb.parameterStableId); } pb.sequenceId = resultSet.getString("sequence_id"); pb.dimId = resultSet.getString("dim_id"); if ( ! 
parameterAssociationMap.containsKey(obsId)) { parameterAssociationMap.put(obsId, new ArrayList<ParameterAssociationBean>()); } parameterAssociationMap.get(obsId).add(pb); } } } /** * Return all parameter stable ids and names * * @exception SQLException When a database error occurrs */ public Map<String, String> getAllParameters() throws SQLException { Map<String, String> parameters = new HashMap<>(); String query = "SELECT stable_id, name FROM komp2.phenotype_parameter"; try (PreparedStatement statement = getConnection().prepareStatement(query)) { ResultSet resultSet = statement.executeQuery(); while (resultSet.next()) { parameters.put(resultSet.getString("stable_id"), resultSet.getString("name")); } } return parameters; } public void populateDatasourceDataMap() throws SQLException { List<String> queries = new ArrayList<>(); queries.add("SELECT id, short_name as name, 'DATASOURCE' as datasource_type FROM external_db"); queries.add("SELECT id, name, 'PROJECT' as datasource_type FROM project"); for (String query : queries) { try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { DatasourceBean b = new DatasourceBean(); b.id = resultSet.getInt("id"); b.name = resultSet.getString("name"); switch (resultSet.getString("datasource_type")) { case "DATASOURCE": datasourceMap.put(resultSet.getInt("id"), b); break; case "PROJECT": projectMap.put(resultSet.getInt("id"), b); break; } } } } } /** * Compare all weight dates to select the nearest to the date of experiment * @param specimenID the specimen * @param dateOfExperiment the date * @return the nearest weight bean to the date of the experiment */ public WeightBean getNearestWeight(Integer specimenID, Date dateOfExperiment) { WeightBean nearest = null; if ( weightMap.containsKey(specimenID) ) { for (WeightBean candidate : weightMap.get(specimenID)) { if (nearest == null) { nearest = candidate; continue; } if (Math.abs(dateOfExperiment.getTime() - 
candidate.date.getTime()) < Math.abs(nearest.date.getTime() - candidate.date.getTime())) { nearest = candidate; } } } // Do not return weight that is > 4 days away from the experiment // since the weight of the specimen become less and less relevant // (Heuristic from Natasha Karp @ WTSI) // 4 days = 345,600,000 ms if (nearest != null && Math.abs(dateOfExperiment.getTime()-nearest.date.getTime()) > 3.456E8) { nearest = null; } return nearest; } /** * Select date of experiment * @param specimenID the specimen * @return the nearest weight bean to the date of the experiment */ public WeightBean getNearestIpgttWeight(Integer specimenID) { WeightBean nearest = null; if ( ipgttWeightMap.containsKey(specimenID) ) { nearest = ipgttWeightMap.get(specimenID); } return nearest; } /** * Return map of specimen ID => List of all weights ordered by date ASC * * @exception SQLException When a database error occurs */ public void populateWeightMap() throws SQLException { int count=0; String query = "SELECT\n" + " o.biological_sample_id, \n" + " data_point AS weight, \n" + " parameter_stable_id, \n" + " date_of_experiment, \n" + " datediff(date_of_experiment, ls.date_of_birth) as days_old, \n" + " e.organisation_id \n" + "FROM observation o \n" + " INNER JOIN unidimensional_observation uo ON uo.id = o.id \n" + " INNER JOIN live_sample ls ON ls.id=o.biological_sample_id \n" + " INNER JOIN experiment_observation eo ON o.id = eo.observation_id \n" + " INNER JOIN experiment e ON e.id = eo.experiment_id \n" + "WHERE parameter_stable_id IN ("+StringUtils.join(weightParamters, ",")+") AND data_point > 0" + " ORDER BY biological_sample_id, date_of_experiment ASC \n" ; try (PreparedStatement statement = getConnection().prepareStatement(query)) { ResultSet resultSet = statement.executeQuery(); while (resultSet.next()) { WeightBean b = new WeightBean(); b.date = resultSet.getDate("date_of_experiment"); b.weight = resultSet.getFloat("weight"); b.parameterStableId = 
resultSet.getString("parameter_stable_id"); b.daysOld = resultSet.getInt("days_old"); final Integer specimenId = resultSet.getInt("biological_sample_id"); if( ! weightMap.containsKey(specimenId)) { weightMap.put(specimenId, new ArrayList<WeightBean>()); } weightMap.get(specimenId).add(b); count+=1; } } logger.info("Added {} weights to the weightmap for {} specimens", count, weightMap.size()); } /** * Return map of specimen ID => weight for * * @exception SQLException When a database error occurrs */ public void populateIpgttWeightMap() throws SQLException { String query = "SELECT o.biological_sample_id, data_point AS weight, parameter_stable_id, " + "date_of_experiment, DATEDIFF(date_of_experiment, ls.date_of_birth) AS days_old " + "FROM observation o " + " INNER JOIN unidimensional_observation uo ON uo.id = o.id " + " INNER JOIN live_sample ls ON ls.id=o.biological_sample_id " + " INNER JOIN experiment_observation eo ON o.id = eo.observation_id " + " INNER JOIN experiment e ON e.id = eo.experiment_id " + "WHERE parameter_stable_id = '"+ipgttWeightParamter+"' " ; try (PreparedStatement statement = getConnection().prepareStatement(query)) { ResultSet resultSet = statement.executeQuery(); while (resultSet.next()) { WeightBean b = new WeightBean(); b.date = resultSet.getDate("date_of_experiment"); b.weight = resultSet.getFloat("weight"); b.parameterStableId = resultSet.getString("parameter_stable_id"); b.daysOld = resultSet.getInt("days_old"); final Integer specimenId = resultSet.getInt("biological_sample_id"); ipgttWeightMap.put(specimenId, b); } } } public static Connection getConnection() { return connection; } public Map<String, Map<String, String>> getTranslateCategoryNames() { return translateCategoryNames; } public Map<String, BiologicalDataBean> getLineBiologicalData() { return lineBiologicalData; } public Map<String, BiologicalDataBean> getBiologicalData() { return biologicalData; } public Map<Integer, DatasourceBean> getDatasourceMap() { return 
datasourceMap; } public Map<Integer, DatasourceBean> getProjectMap() { return projectMap; } /** * Internal class to act as Map value DTO for biological data */ protected class BiologicalDataBean { public String alleleAccession; public String alleleSymbol; public Integer biologicalModelId; public Integer biologicalSampleId; public String colonyId; public Date dateOfBirth; public String externalSampleId; public String geneAcc; public String geneSymbol; public String phenotypingCenterName; public Integer phenotypingCenterId; public String sampleGroup; public String sex; public String strainAcc; public String strainName; public String zygosity; } /** * Internal class to act as Map value DTO for weight data */ protected class WeightBean { public String parameterStableId; public Date date; public Float weight; public Integer daysOld; } /** * Internal class to act as Map value DTO for datasource data */ protected class DatasourceBean { public Integer id; public String name; } /** * Internal class to act as Map value DTO for datasource data */ protected class ParameterAssociationBean { public String parameterAssociationName; public String parameterAssociationValue; public Integer id; public Integer observationId; public String parameterStableId; public String sequenceId; public String dimId; } }
src/main/java/uk/ac/ebi/phenotype/solr/indexer/ObservationIndexer.java
package uk.ac.ebi.phenotype.solr.indexer; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import uk.ac.ebi.phenotype.pojo.BiologicalSampleType; import uk.ac.ebi.phenotype.pojo.SexType; import uk.ac.ebi.phenotype.pojo.ZygosityType; import uk.ac.ebi.phenotype.service.dto.ObservationDTO; import uk.ac.ebi.phenotype.solr.indexer.beans.ImpressBean; import uk.ac.ebi.phenotype.solr.indexer.utils.IndexerMap; import javax.sql.DataSource; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.*; /** * Populate the experiment core */ public class ObservationIndexer extends AbstractIndexer { private static final Logger logger = LoggerFactory.getLogger(ObservationIndexer.class); private static Connection connection; @Autowired @Qualifier("komp2DataSource") DataSource komp2DataSource; @Autowired @Qualifier("observationIndexing") SolrServer observationSolrServer; Map<String, BiologicalDataBean> biologicalData = new HashMap<>(); Map<String, BiologicalDataBean> lineBiologicalData = new HashMap<>(); Map<Integer, ImpressBean> pipelineMap = new HashMap<>(); Map<Integer, ImpressBean> procedureMap = new HashMap<>(); Map<Integer, ImpressBean> parameterMap = new HashMap<>(); Map<Integer, DatasourceBean> datasourceMap = new HashMap<>(); Map<Integer, DatasourceBean> projectMap = new HashMap<>(); Map<Integer, List<ParameterAssociationBean>> parameterAssociationMap = new HashMap<>(); Map<Integer, List<WeightBean>> weightMap = new HashMap<>(); Map<Integer, WeightBean> ipgttWeightMap = new HashMap<>(); Map<String, Map<String, String>> translateCategoryNames = new HashMap<>(); public final String 
ipgttWeightParamter = "IMPC_IPG_001_001"; public final List<String> weightParamters = Arrays.asList( "'IMPC_GRS_003_001'", "'IMPC_CAL_001_001'", "'IMPC_DXA_001_001'", "'IMPC_HWT_007_001'", "'IMPC_PAT_049_001'", "'IMPC_BWT_001_001'", "'IMPC_ABR_001_001'", "'IMPC_CHL_001_001'", "'TCP_CHL_001_001'", "'HMGU_ROT_004_001'", "'ESLIM_001_001_001'", "'ESLIM_002_001_001'", "'ESLIM_003_001_001'", "'ESLIM_004_001_001'", "'ESLIM_005_001_001'", "'ESLIM_020_001_001'", "'ESLIM_022_001_001'", "'ESLIM_009_001_003'", "'ESLIM_010_001_003'", "'ESLIM_011_001_011'", "'ESLIM_012_001_005'", "'ESLIM_013_001_018'", "'ESLIM_022_001_001'", "'GMC_916_001_022'", "'GMC_908_001_001'", "'GMC_900_001_001'", "'GMC_926_001_003'", "'GMC_922_001_002'", "'GMC_923_001_001'", "'GMC_921_001_002'", "'GMC_902_001_003'", "'GMC_912_001_018'", "'GMC_917_001_001'", "'GMC_920_001_001'", "'GMC_909_001_002'", "'GMC_914_001_001'" ); public final List<String> maleFertilityParameters = Arrays.asList( "IMPC_FER_001_001", "IMPC_FER_006_001", "IMPC_FER_007_001", "IMPC_FER_008_001", "IMPC_FER_009_001"); public final List<String> femaleFertilityParameters = Arrays.asList( "IMPC_FER_019_001", "IMPC_FER_010_001", "IMPC_FER_011_001", "IMPC_FER_012_001", "IMPC_FER_013_001"); public ObservationIndexer() { } @Override public void validateBuild() throws IndexerException { Long numFound = getDocumentCount(observationSolrServer); if (numFound <= MINIMUM_DOCUMENT_COUNT) throw new IndexerException(new ValidationException("Actual observation document count is " + numFound + ".")); if (numFound != documentCount) logger.warn("WARNING: Added " + documentCount + " observation documents but SOLR reports " + numFound + " documents."); else logger.info("validateBuild(): Indexed " + documentCount + " observation documents."); } public static void main(String[] args) throws IndexerException { ObservationIndexer main = new ObservationIndexer(); main.initialise(args); main.run(); main.validateBuild(); logger.info("Process finished. 
Exiting."); } @Override protected Logger getLogger() { return logger; } @Override public void initialise(String[] args) throws IndexerException { super.initialise(args); try { connection = komp2DataSource.getConnection(); logger.info("Populating impress maps"); pipelineMap = IndexerMap.getImpressPipelines(connection); procedureMap = IndexerMap.getImpressProcedures(connection); parameterMap = IndexerMap.getImpressParameters(connection); } catch (SQLException e) { throw new IndexerException(e); } printConfiguration(); } @Override public void run() throws IndexerException { Long start = System.currentTimeMillis(); try { logger.info("Populating data source, project, and category translation maps"); populateDatasourceDataMap(); populateCategoryNamesDataMap(); logger.info("Populating biological data maps"); populateBiologicalDataMap(); populateLineBiologicalDataMap(); populateParameterAssociationMap(); logger.info("Populating weight maps"); populateWeightMap(); populateIpgttWeightMap(); logger.info("Populating experiment solr core"); populateObservationSolrCore(); } catch (SolrServerException | SQLException | IOException e) { throw new IndexerException(e); } logger.info("Populating experiment solr core - done [took: {}s]", (System.currentTimeMillis() - start) / 1000.0); } public void populateObservationSolrCore() throws SQLException, IOException, SolrServerException { int count = 0; observationSolrServer.deleteByQuery("*:*"); String query = "SELECT o.id as id, o.db_id as datasource_id, o.parameter_id as parameter_id, o.parameter_stable_id, " + "o.observation_type, o.missing, o.parameter_status, o.parameter_status_message, " + "o.biological_sample_id, " + "e.project_id as project_id, e.pipeline_id as pipeline_id, e.procedure_id as procedure_id, " + "e.date_of_experiment, e.external_id, e.id as experiment_id, " + "e.metadata_combined as metadata_combined, e.metadata_group as metadata_group, " + "co.category as raw_category, " + "uo.data_point as unidimensional_data_point, 
" + "mo.data_point as multidimensional_data_point, " + "tso.data_point as time_series_data_point, " + "mo.order_index, " + "mo.dimension, " + "tso.time_point, " + "tso.discrete_point, " + "iro.file_type, " + "iro.download_file_path " + "FROM observation o " + "LEFT OUTER JOIN categorical_observation co ON o.id=co.id " + "LEFT OUTER JOIN unidimensional_observation uo ON o.id=uo.id " + "LEFT OUTER JOIN multidimensional_observation mo ON o.id=mo.id " + "LEFT OUTER JOIN time_series_observation tso ON o.id=tso.id " + "LEFT OUTER JOIN image_record_observation iro ON o.id=iro.id " + "INNER JOIN experiment_observation eo ON eo.observation_id=o.id " + "INNER JOIN experiment e on eo.experiment_id=e.id " + "WHERE o.missing=0"; try (PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { ObservationDTO o = new ObservationDTO(); o.setId(r.getInt("id")); o.setParameterId(r.getInt("parameter_id")); o.setExperimentId(r.getInt("experiment_id")); o.setDateOfExperiment(r.getDate("date_of_experiment")); o.setExperimentSourceId(r.getString("external_id")); o.setParameterId(parameterMap.get(r.getInt("parameter_id")).id); o.setParameterName(parameterMap.get(r.getInt("parameter_id")).name); o.setParameterStableId(parameterMap.get(r.getInt("parameter_id")).stableId); o.setProcedureId(procedureMap.get(r.getInt("procedure_id")).id); o.setProcedureName(procedureMap.get(r.getInt("procedure_id")).name); String procedureStableId = procedureMap.get(r.getInt("procedure_id")).stableId; o.setProcedureStableId(procedureStableId); o.setProcedureGroup(procedureStableId.substring(0, procedureStableId.lastIndexOf("_"))); o.setPipelineId(pipelineMap.get(r.getInt("pipeline_id")).id); o.setPipelineName(pipelineMap.get(r.getInt("pipeline_id")).name); o.setPipelineStableId(pipelineMap.get(r.getInt("pipeline_id")).stableId); 
o.setDataSourceId(datasourceMap.get(r.getInt("datasource_id")).id); o.setDataSourceName(datasourceMap.get(r.getInt("datasource_id")).name); o.setProjectId(projectMap.get(r.getInt("project_id")).id); o.setProjectName(projectMap.get(r.getInt("project_id")).name); o.setMetadataGroup(r.getString("metadata_group")); if (r.wasNull()) { o.setMetadataGroup(""); o.setMetadata(new ArrayList<String>()); } String metadataCombined = r.getString("metadata_combined"); if ( ! r.wasNull()) { o.setMetadata(new ArrayList<>(Arrays.asList(metadataCombined.split("::")))); } // Add the Biological data String bioSampleId = r.getString("biological_sample_id"); if (r.wasNull()) { // Line level data BiologicalDataBean b = lineBiologicalData.get(r.getString("experiment_id")); if (b == null) { logger.error("Cannot find biological data for experiment {}", r.getString("experiment_id")); continue; } o.setBiologicalModelId(b.biologicalModelId); o.setGeneAccession(b.geneAcc); o.setGeneSymbol(b.geneSymbol); o.setAlleleAccession(b.alleleAccession); o.setAlleleSymbol(b.alleleSymbol); o.setStrainAccessionId(b.strainAcc); o.setStrainName(b.strainName); o.setPhenotypingCenter(b.phenotypingCenterName); o.setPhenotypingCenterId(b.phenotypingCenterId); o.setColonyId(b.colonyId); // Viability applies to both sexes if (o.getParameterStableId().contains("_VIA_")) { o.setSex(SexType.both.getName()); } else { // Fertility applies to the sex tested, separate parameters per male//female if (maleFertilityParameters.contains(o.getParameterStableId())) { o.setSex(SexType.male.getName()); } else if (femaleFertilityParameters.contains(o.getParameterStableId())) { o.setSex(SexType.female.getName()); } if(o.getSex() == null) { o.setSex(SexType.both.getName()); } } if (b.zygosity != null) { o.setZygosity(b.zygosity); } else { // Default to hom o.setZygosity(ZygosityType.homozygote.getName()); } // All line level parameters are sample group "experimental" due to the nature of the // procedures (i.e. 
no control mice will go through VIA or FER procedures.) o.setGroup(BiologicalSampleType.experimental.getName()); } else { // Specimen level data BiologicalDataBean b = biologicalData.get(bioSampleId); o.setBiologicalModelId(b.biologicalModelId); o.setGeneAccession(b.geneAcc); o.setGeneSymbol(b.geneSymbol); o.setAlleleAccession(b.alleleAccession); o.setAlleleSymbol(b.alleleSymbol); o.setStrainAccessionId(b.strainAcc); o.setStrainName(b.strainName); o.setPhenotypingCenter(b.phenotypingCenterName); o.setPhenotypingCenterId(b.phenotypingCenterId); o.setColonyId(b.colonyId); o.setZygosity(b.zygosity); o.setDateOfBirth(b.dateOfBirth); o.setSex(b.sex); o.setGroup(b.sampleGroup); o.setBiologicalSampleId(b.biologicalSampleId); o.setExternalSampleId(b.externalSampleId); } o.setObservationType(r.getString("observation_type")); String cat = r.getString("raw_category"); if ( ! r.wasNull()) { String param = r.getString("parameter_stable_id"); if (translateCategoryNames.containsKey(param)) { String transCat = translateCategoryNames.get(param).get(cat); if (transCat != null && ! transCat.equals("")) { o.setCategory(transCat); } else { o.setCategory(cat); } } else { o.setCategory(cat); } } // Add the correct "data point" for the type switch (r.getString("observation_type")) { case "unidimensional": o.setDataPoint(r.getFloat("unidimensional_data_point")); break; case "multidimensional": o.setDataPoint(r.getFloat("multidimensional_data_point")); break; case "time_series": o.setDataPoint(r.getFloat("time_series_data_point")); break; } Integer order_index = r.getInt("order_index"); if ( ! r.wasNull()) { o.setOrderIndex(order_index); } String dimension = r.getString("dimension"); if ( ! r.wasNull()) { o.setDimension(dimension); } String time_point = r.getString("time_point"); if ( ! r.wasNull()) { o.setTimePoint(time_point); } Float discrete_point = r.getFloat("discrete_point"); if ( ! 
r.wasNull()) { o.setDiscretePoint(discrete_point); } String file_type = r.getString("file_type"); if ( ! r.wasNull()) { o.setFileType(file_type); } String download_file_path = r.getString("download_file_path"); if ( ! r.wasNull()) { o.setDownloadFilePath(download_file_path); } if (parameterAssociationMap.containsKey(r.getInt("id"))) { for (ParameterAssociationBean pb : parameterAssociationMap.get(r.getInt("id"))) { // Will never be null, we hope o.addParameterAssociationStableId(pb.parameterStableId); o.addParameterAssociationName(pb.parameterAssociationName); if (StringUtils.isNotEmpty(pb.parameterAssociationValue)) { o.addParameterAssociationValue(pb.parameterAssociationValue); } if (StringUtils.isNotEmpty(pb.sequenceId)) { o.addParameterAssociationSequenceId(pb.sequenceId); } if (StringUtils.isNotEmpty(pb.dimId)) { o.addParameterAssociationDimId(pb.dimId); } } } // Add weight parameters only if this observation isn't for a weight parameter if ( ! weightParamters.contains(o.getParameterStableId()) && ! 
ipgttWeightParamter.equals(o.getParameterStableId())) { WeightBean b = getNearestWeight(o.getBiologicalSampleId(), o.getDateOfExperiment()); if (o.getParameterStableId().equals(ipgttWeightParamter)) { b = getNearestIpgttWeight(o.getBiologicalSampleId()); } if (b != null) { o.setWeight(b.weight); o.setWeightDate(b.date); o.setWeightDaysOld(b.daysOld); o.setWeightParameterStableId(b.parameterStableId); } } // 60 seconds between commits documentCount++; observationSolrServer.addBean(o, 60000); count ++; if (count % 100000 == 0) { logger.info(" added " + count + " beans"); } } // Final commit to save the rest of the docs observationSolrServer.commit(); } catch (Exception e) { logger.error("Big error {}", e.getMessage(), e); } } /** * Add all the relevant data required quickly looking up biological data * associated to a biological sample * * @throws SQLException when a database exception occurs */ public void populateBiologicalDataMap() throws SQLException { String query = "SELECT CAST(bs.id AS CHAR) as biological_sample_id, bs.organisation_id as phenotyping_center_id, " + "org.name as phenotyping_center_name, bs.sample_group, bs.external_id as external_sample_id, " + "ls.date_of_birth, ls.colony_id, ls.sex as sex, ls.zygosity, " + "bms.biological_model_id, " + "strain.acc as strain_acc, strain.name as strain_name, " + "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=bms.biological_model_id) as allele_accession, " + "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bms.biological_model_id) as allele_symbol, " + "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bms.biological_model_id) as acc, " + "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE 
bmgf.biological_model_id=bms.biological_model_id) as symbol " + "FROM biological_sample bs " + "INNER JOIN organisation org ON bs.organisation_id=org.id " + "INNER JOIN live_sample ls ON bs.id=ls.id " + "INNER JOIN biological_model_sample bms ON bs.id=bms.biological_sample_id " + "INNER JOIN biological_model_strain bmstrain ON bmstrain.biological_model_id=bms.biological_model_id " + "INNER JOIN strain strain ON strain.acc=bmstrain.strain_acc"; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { BiologicalDataBean b = new BiologicalDataBean(); b.alleleAccession = resultSet.getString("allele_accession"); b.alleleSymbol = resultSet.getString("allele_symbol"); b.biologicalModelId = resultSet.getInt("biological_model_id"); b.biologicalSampleId = resultSet.getInt("biological_sample_id"); b.colonyId = resultSet.getString("colony_id"); b.dateOfBirth = resultSet.getDate("date_of_birth"); b.externalSampleId = resultSet.getString("external_sample_id"); b.geneAcc = resultSet.getString("acc"); b.geneSymbol = resultSet.getString("symbol"); b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id"); b.phenotypingCenterName = resultSet.getString("phenotyping_center_name"); b.sampleGroup = resultSet.getString("sample_group"); b.sex = resultSet.getString("sex"); b.strainAcc = resultSet.getString("strain_acc"); b.strainName = resultSet.getString("strain_name"); b.zygosity = resultSet.getString("zygosity"); biologicalData.put(resultSet.getString("biological_sample_id"), b); } } } /** * Add all the relevant data required quickly looking up biological data * associated to a biological model (really an experiment) * * @throws SQLException when a database exception occurs */ public void populateLineBiologicalDataMap() throws SQLException { String query = "SELECT e.id as experiment_id, e.colony_id, e.id, bm.id as biological_model_id, " + "e.organisation_id as phenotyping_center_id, org.name as 
phenotyping_center_name, " + "strain.acc as strain_acc, strain.name as strain_name, " + "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=bm.id) as allele_accession, " + "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) as allele_symbol, " + "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bm.id) as acc, " + "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id) as symbol " + "FROM experiment e " + "INNER JOIN biological_model bm ON bm.id=(SELECT DISTINCT biological_model_id FROM live_sample ls " + " INNER JOIN biological_model_sample bms ON bms.biological_sample_id=ls.id " + " INNER JOIN biological_model biom ON biom.id=bms.biological_model_id " + " WHERE biom.allelic_composition !='' AND ls.colony_id=e.colony_id LIMIT 1)" + "INNER JOIN organisation org ON e.organisation_id=org.id " + "INNER JOIN biological_model_strain bm_strain ON bm_strain.biological_model_id=bm.id " + "INNER JOIN strain strain ON strain.acc=bm_strain.strain_acc" ; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { BiologicalDataBean b = new BiologicalDataBean(); b.alleleAccession = resultSet.getString("allele_accession"); b.alleleSymbol = resultSet.getString("allele_symbol"); b.biologicalModelId = resultSet.getInt("biological_model_id"); b.colonyId = resultSet.getString("colony_id"); b.geneAcc = resultSet.getString("acc"); b.geneSymbol = resultSet.getString("symbol"); b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id"); b.phenotypingCenterName = resultSet.getString("phenotyping_center_name"); b.strainAcc = resultSet.getString("strain_acc"); b.strainName = 
resultSet.getString("strain_name"); lineBiologicalData.put(resultSet.getString("experiment_id"), b); } } } /** * Add all the relevant data required for translating the category names in * the cases where the category names are numerals, but the actual name is * in the description field * * @throws SQLException when a database exception occurs */ public void populateCategoryNamesDataMap() throws SQLException { String query = "SELECT pp.stable_id, ppo.name, ppo.description FROM phenotype_parameter pp \n" + "INNER JOIN phenotype_parameter_lnk_option pplo ON pp.id=pplo.parameter_id\n" + "INNER JOIN phenotype_parameter_option ppo ON ppo.id=pplo.option_id \n" + "WHERE ppo.name NOT REGEXP '^[a-zA-Z]' AND ppo.description!=''"; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { String stableId = resultSet.getString("stable_id"); if ( ! translateCategoryNames.containsKey(stableId)) { translateCategoryNames.put(stableId, new HashMap<String, String>()); } translateCategoryNames.get(stableId).put(resultSet.getString("name"), resultSet.getString("description")); } } } public void populateParameterAssociationMap() throws SQLException { Map<String, String> stableIdToNameMap = this.getAllParameters(); String query = "SELECT id, observation_id, parameter_id, sequence_id, dim_id, parameter_association_value FROM parameter_association"; try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { Integer obsId = resultSet.getInt("observation_id"); ParameterAssociationBean pb = new ParameterAssociationBean(); pb.observationId = obsId; pb.parameterStableId = resultSet.getString("parameter_id"); pb.parameterAssociationValue = resultSet.getString("parameter_association_value"); if (stableIdToNameMap.get(pb.parameterStableId) != null) { pb.parameterAssociationName = stableIdToNameMap.get(pb.parameterStableId); } pb.sequenceId = 
resultSet.getString("sequence_id"); pb.dimId = resultSet.getString("dim_id"); if ( ! parameterAssociationMap.containsKey(obsId)) { parameterAssociationMap.put(obsId, new ArrayList<ParameterAssociationBean>()); } parameterAssociationMap.get(obsId).add(pb); } } } /** * Return all parameter stable ids and names * * @exception SQLException When a database error occurrs */ public Map<String, String> getAllParameters() throws SQLException { Map<String, String> parameters = new HashMap<>(); String query = "SELECT stable_id, name FROM komp2.phenotype_parameter"; try (PreparedStatement statement = getConnection().prepareStatement(query)) { ResultSet resultSet = statement.executeQuery(); while (resultSet.next()) { parameters.put(resultSet.getString("stable_id"), resultSet.getString("name")); } } return parameters; } public void populateDatasourceDataMap() throws SQLException { List<String> queries = new ArrayList<>(); queries.add("SELECT id, short_name as name, 'DATASOURCE' as datasource_type FROM external_db"); queries.add("SELECT id, name, 'PROJECT' as datasource_type FROM project"); for (String query : queries) { try (PreparedStatement p = connection.prepareStatement(query)) { ResultSet resultSet = p.executeQuery(); while (resultSet.next()) { DatasourceBean b = new DatasourceBean(); b.id = resultSet.getInt("id"); b.name = resultSet.getString("name"); switch (resultSet.getString("datasource_type")) { case "DATASOURCE": datasourceMap.put(resultSet.getInt("id"), b); break; case "PROJECT": projectMap.put(resultSet.getInt("id"), b); break; } } } } } /** * Compare all weight dates to select the nearest to the date of experiment * @param specimenID the specimen * @param dateOfExperiment the date * @return the nearest weight bean to the date of the experiment */ public WeightBean getNearestWeight(Integer specimenID, Date dateOfExperiment) { WeightBean nearest = null; if ( weightMap.containsKey(specimenID) ) { for (WeightBean candidate : weightMap.get(specimenID)) { if (nearest == 
null) { nearest = candidate; continue; } if (Math.abs(dateOfExperiment.getTime() - candidate.date.getTime()) < Math.abs(nearest.date.getTime() - candidate.date.getTime())) { nearest = candidate; } } } // Do not return weight that is > 4 days away from the experiment // since the weight of the specimen become less and less relevant // (Heuristic from Natasha Karp @ WTSI) // 4 days = 345,600,000 ms if (nearest != null && Math.abs(dateOfExperiment.getTime()-nearest.date.getTime()) > 3.456E8) { nearest = null; } return nearest; } /** * Select date of experiment * @param specimenID the specimen * @return the nearest weight bean to the date of the experiment */ public WeightBean getNearestIpgttWeight(Integer specimenID) { WeightBean nearest = null; if ( ipgttWeightMap.containsKey(specimenID) ) { nearest = ipgttWeightMap.get(specimenID); } return nearest; } /** * Return map of specimen ID => List of all weights ordered by date ASC * * @exception SQLException When a database error occurs */ public void populateWeightMap() throws SQLException { int count=0; String query = "SELECT\n" + " o.biological_sample_id, \n" + " data_point AS weight, \n" + " parameter_stable_id, \n" + " date_of_experiment, \n" + " datediff(date_of_experiment, ls.date_of_birth) as days_old, \n" + " e.organisation_id \n" + "FROM observation o \n" + " INNER JOIN unidimensional_observation uo ON uo.id = o.id \n" + " INNER JOIN live_sample ls ON ls.id=o.biological_sample_id \n" + " INNER JOIN experiment_observation eo ON o.id = eo.observation_id \n" + " INNER JOIN experiment e ON e.id = eo.experiment_id \n" + "WHERE parameter_stable_id IN ("+StringUtils.join(weightParamters, ",")+") AND data_point > 0" + " ORDER BY biological_sample_id, date_of_experiment ASC \n" ; try (PreparedStatement statement = getConnection().prepareStatement(query)) { ResultSet resultSet = statement.executeQuery(); while (resultSet.next()) { WeightBean b = new WeightBean(); b.date = resultSet.getDate("date_of_experiment"); b.weight 
= resultSet.getFloat("weight"); b.parameterStableId = resultSet.getString("parameter_stable_id"); b.daysOld = resultSet.getInt("days_old"); final Integer specimenId = resultSet.getInt("biological_sample_id"); if( ! weightMap.containsKey(specimenId)) { weightMap.put(specimenId, new ArrayList<WeightBean>()); } weightMap.get(specimenId).add(b); count+=1; } } logger.info("Added {} weights to the weightmap for {} specimens", count, weightMap.size()); } /** * Return map of specimen ID => weight for * * @exception SQLException When a database error occurrs */ public void populateIpgttWeightMap() throws SQLException { String query = "SELECT o.biological_sample_id, data_point AS weight, parameter_stable_id, " + "date_of_experiment, DATEDIFF(date_of_experiment, ls.date_of_birth) AS days_old " + "FROM observation o " + " INNER JOIN unidimensional_observation uo ON uo.id = o.id " + " INNER JOIN live_sample ls ON ls.id=o.biological_sample_id " + " INNER JOIN experiment_observation eo ON o.id = eo.observation_id " + " INNER JOIN experiment e ON e.id = eo.experiment_id " + "WHERE parameter_stable_id = '"+ipgttWeightParamter+"' " ; try (PreparedStatement statement = getConnection().prepareStatement(query)) { ResultSet resultSet = statement.executeQuery(); while (resultSet.next()) { WeightBean b = new WeightBean(); b.date = resultSet.getDate("date_of_experiment"); b.weight = resultSet.getFloat("weight"); b.parameterStableId = resultSet.getString("parameter_stable_id"); b.daysOld = resultSet.getInt("days_old"); final Integer specimenId = resultSet.getInt("biological_sample_id"); ipgttWeightMap.put(specimenId, b); } } } public static Connection getConnection() { return connection; } public Map<String, Map<String, String>> getTranslateCategoryNames() { return translateCategoryNames; } public Map<String, BiologicalDataBean> getLineBiologicalData() { return lineBiologicalData; } public Map<String, BiologicalDataBean> getBiologicalData() { return biologicalData; } public Map<Integer, 
DatasourceBean> getDatasourceMap() { return datasourceMap; } public Map<Integer, DatasourceBean> getProjectMap() { return projectMap; } /** * Internal class to act as Map value DTO for biological data */ protected class BiologicalDataBean { public String alleleAccession; public String alleleSymbol; public Integer biologicalModelId; public Integer biologicalSampleId; public String colonyId; public Date dateOfBirth; public String externalSampleId; public String geneAcc; public String geneSymbol; public String phenotypingCenterName; public Integer phenotypingCenterId; public String sampleGroup; public String sex; public String strainAcc; public String strainName; public String zygosity; } /** * Internal class to act as Map value DTO for weight data */ protected class WeightBean { public String parameterStableId; public Date date; public Float weight; public Integer daysOld; } /** * Internal class to act as Map value DTO for datasource data */ protected class DatasourceBean { public Integer id; public String name; } /** * Internal class to act as Map value DTO for datasource data */ protected class ParameterAssociationBean { public String parameterAssociationName; public String parameterAssociationValue; public Integer id; public Integer observationId; public String parameterStableId; public String sequenceId; public String dimId; } }
only override the biological if the correct one isn't found not set already
src/main/java/uk/ac/ebi/phenotype/solr/indexer/ObservationIndexer.java
only override the biological if the correct one isn't found not set already
Java
apache-2.0
c26f443cb830167cecfafc49e853c5bbf95dbaaf
0
marverenic/Jockey,marverenic/Jockey
package com.marverenic.music.fragments; import android.app.ActionBar; import android.content.Context; import android.media.audiofx.Equalizer; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v7.widget.SwitchCompat; import android.support.v7.widget.Toolbar; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.animation.AlphaAnimation; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.CompoundButton; import android.widget.LinearLayout; import android.widget.SeekBar; import android.widget.Spinner; import android.widget.TextView; import com.marverenic.music.JockeyApplication; import com.marverenic.music.R; import com.marverenic.music.data.store.PreferenceStore; import com.marverenic.music.player.PlayerController; import com.marverenic.music.player.RemoteEqualizer; import com.marverenic.music.utils.Util; import javax.inject.Inject; public class EqualizerFragment extends Fragment implements CompoundButton.OnCheckedChangeListener, FragmentManager.OnBackStackChangedListener { @Inject PreferenceStore mPrefStore; @Inject PlayerController mPlayerController; private RemoteEqualizer equalizer; private EqualizerFrame[] sliders; private TextView presetSpinnerPrefix; private Spinner presetSpinner; private SwitchCompat equalizerToggle; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getActivity().getSupportFragmentManager().addOnBackStackChangedListener(this); JockeyApplication.getComponent(this).inject(this); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View layout = inflater.inflate(R.layout.fragment_equalizer, container, false); presetSpinnerPrefix = (TextView) layout.findViewById(R.id.eq_preset_prefix); presetSpinner = 
(Spinner) layout.findViewById(R.id.eq_preset_spinner); Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar); if (toolbar != null) { equalizerToggle = new SwitchCompat(getActivity()); equalizerToggle.setOnCheckedChangeListener(this); Toolbar.LayoutParams params = new Toolbar.LayoutParams( ActionBar.LayoutParams.WRAP_CONTENT, ActionBar.LayoutParams.WRAP_CONTENT, Gravity.END); int padding = (int) (16 * getResources().getDisplayMetrics().density); params.setMargins(padding, 0, padding, 0); toolbar.addView(equalizerToggle, params); AlphaAnimation anim = new AlphaAnimation(0f, 1.0f); anim.setDuration(getResources().getInteger(android.R.integer.config_shortAnimTime)); anim.setInterpolator(getContext(), android.R.anim.decelerate_interpolator); equalizerToggle.startAnimation(anim); } LinearLayout equalizerPanel = (LinearLayout) layout.findViewById(R.id.equalizer_panel); equalizer = generateEqualizerConfig(); int bandCount = (equalizer != null) ? equalizer.getNumberOfBands() : 0; sliders = new EqualizerFrame[bandCount]; PresetAdapter presetAdapter = new PresetAdapter(getActivity(), equalizer, sliders); presetSpinner.setAdapter(presetAdapter); presetSpinner.setSelection(mPrefStore.getEqualizerPresetId() + 1); presetSpinner.setOnItemSelectedListener(presetAdapter); for (short i = 0; i < bandCount; i++) { inflater.inflate(R.layout.instance_eq_slider, equalizerPanel, true); sliders[i] = new EqualizerFrame(equalizerPanel.getChildAt(i), equalizer, i, presetSpinner); } setEqualizerEnabled(mPrefStore.getEqualizerEnabled()); // If this device already has an application that can handle equalizers system-wide, inform // the user of possible issues by using Jockey's built-in equalizer if (Util.getSystemEqIntent(getActivity()) != null) { ((TextView) layout.findViewById(R.id.equalizer_notes)) .setText(R.string.equalizerNoteSystem); } return layout; } private RemoteEqualizer generateEqualizerConfig() { // Obtain an instance of the system equalizer to discover available 
configuration options // for an equalizer including bands and presets. This equalizer is not used to control // audio settings and is released before this method ends Equalizer systemEqualizer = new Equalizer(0, 1); RemoteEqualizer eq = new RemoteEqualizer(systemEqualizer); Equalizer.Settings settings = mPrefStore.getEqualizerSettings(); if (settings != null) { eq.setProperties(mPrefStore.getEqualizerSettings()); } systemEqualizer.release(); return eq; } private void applyEqualizer() { mPrefStore.setEqualizerPresetId((int) presetSpinner.getSelectedItemId()); mPrefStore.setEqualizerSettings(equalizer.getProperties()); mPrefStore.setEqualizerEnabled(equalizerToggle.isChecked()); mPlayerController.updatePlayerPreferences(mPrefStore); } @Override public void onResume() { super.onResume(); Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar); if (toolbar != null) { toolbar.setTitle(R.string.header_equalizer); } } private void setEqualizerEnabled(boolean enabled) { if (equalizerToggle.isChecked() != enabled) { equalizerToggle.setChecked(enabled); } presetSpinnerPrefix.setEnabled(enabled); presetSpinner.setEnabled(enabled); for (EqualizerFrame f : sliders) { f.update(equalizer.getCurrentPreset() == -1 && enabled); } applyEqualizer(); } @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { setEqualizerEnabled(isChecked); applyEqualizer(); } @Override public void onBackStackChanged() { if (isRemoving()) { final Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar); if (toolbar != null) { final int duration = getResources().getInteger(android.R.integer.config_mediumAnimTime); AlphaAnimation anim = new AlphaAnimation(1.0f, 0f); anim.setDuration(duration); anim.setInterpolator(getContext(), android.R.anim.decelerate_interpolator); equalizerToggle.startAnimation(anim); new Handler().postDelayed(() -> toolbar.removeView(equalizerToggle), duration); } applyEqualizer(); } } private class PresetAdapter extends 
BaseAdapter implements AdapterView.OnItemSelectedListener { private Context context; private String[] presets; private EqualizerFrame[] sliders; PresetAdapter(Context context, RemoteEqualizer equalizer, EqualizerFrame[] sliders) { this.context = context; this.sliders = sliders; presets = new String[equalizer.getNumberOfPresets() + 1]; presets[0] = "Custom"; // TODO String resource for (short i = 0; i < presets.length - 1; i++) { presets[i + 1] = equalizer.getPresetName(i); } } @Override public int getCount() { return presets.length; } @Override public Object getItem(int position) { return presets[position]; } @Override public long getItemId(int position) { return position - 1; } @Override public View getView(int position, View convertView, ViewGroup parent) { if (convertView == null) { convertView = LayoutInflater .from(context) .inflate(android.R.layout.simple_spinner_item, parent, false); } TextView textView = (TextView) convertView.findViewById(android.R.id.text1); textView.setText(presets[position]); return convertView; } @Override public View getDropDownView(int position, View convertView, ViewGroup parent) { if (convertView == null) { convertView = LayoutInflater .from(context) .inflate(android.R.layout.simple_spinner_dropdown_item, parent, false); } return getView(position, convertView, parent); } @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { equalizer.usePreset((short) id); applyEqualizer(); for (EqualizerFrame f : sliders) { f.update(id == -1 && equalizerToggle.isChecked()); } } @Override public void onNothingSelected(AdapterView<?> parent) { } } private class EqualizerFrame implements SeekBar.OnSeekBarChangeListener { final short bandNumber; final SeekBar bandSlider; final TextView bandLabel; final Spinner presetSpinner; final int minLevel; final int maxLevel; public EqualizerFrame(View root, RemoteEqualizer eq, short bandNumber, Spinner presetSpinner) { this.bandNumber = bandNumber; this.presetSpinner = 
presetSpinner; bandSlider = (SeekBar) root.findViewById(R.id.eq_slider); bandLabel = (TextView) root.findViewById(R.id.eq_band_name); int frequency = eq.getCenterFreq(bandNumber) / 1000; if (frequency > 1000) { bandLabel.setText(frequency / 1000 + "K"); } else { bandLabel.setText(Integer.toString(frequency)); } int[] range = eq.getBandLevelRange(); minLevel = range[0]; maxLevel = range[1]; bandSlider.setMax(Math.abs(minLevel) + maxLevel); bandSlider.setProgress(eq.getBandLevel(bandNumber) + Math.abs(range[0])); bandSlider.setOnSeekBarChangeListener(this); } public void update(boolean enabled) { bandSlider.setEnabled(enabled); bandLabel.setEnabled(enabled); } public void update(int level) { bandSlider.setProgress(level + Math.abs(minLevel)); } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { equalizer.setBandLevel(bandNumber, (short) (progress - Math.abs(minLevel))); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { presetSpinner.setSelection(0); equalizer.usePreset(-1); applyEqualizer(); } } }
app/src/main/java/com/marverenic/music/fragments/EqualizerFragment.java
package com.marverenic.music.fragments; import android.app.ActionBar; import android.content.Context; import android.media.audiofx.Equalizer; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v7.widget.SwitchCompat; import android.support.v7.widget.Toolbar; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.animation.AlphaAnimation; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.CompoundButton; import android.widget.LinearLayout; import android.widget.SeekBar; import android.widget.Spinner; import android.widget.TextView; import com.marverenic.music.JockeyApplication; import com.marverenic.music.R; import com.marverenic.music.data.store.PreferenceStore; import com.marverenic.music.player.OldPlayerController; import com.marverenic.music.player.RemoteEqualizer; import com.marverenic.music.utils.Util; import javax.inject.Inject; public class EqualizerFragment extends Fragment implements CompoundButton.OnCheckedChangeListener, FragmentManager.OnBackStackChangedListener { @Inject PreferenceStore mPrefStore; private RemoteEqualizer equalizer; private EqualizerFrame[] sliders; private TextView presetSpinnerPrefix; private Spinner presetSpinner; private SwitchCompat equalizerToggle; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getActivity().getSupportFragmentManager().addOnBackStackChangedListener(this); JockeyApplication.getComponent(this).inject(this); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View layout = inflater.inflate(R.layout.fragment_equalizer, container, false); presetSpinnerPrefix = (TextView) layout.findViewById(R.id.eq_preset_prefix); presetSpinner = (Spinner) 
layout.findViewById(R.id.eq_preset_spinner); Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar); if (toolbar != null) { equalizerToggle = new SwitchCompat(getActivity()); equalizerToggle.setOnCheckedChangeListener(this); Toolbar.LayoutParams params = new Toolbar.LayoutParams( ActionBar.LayoutParams.WRAP_CONTENT, ActionBar.LayoutParams.WRAP_CONTENT, Gravity.END); int padding = (int) (16 * getResources().getDisplayMetrics().density); params.setMargins(padding, 0, padding, 0); toolbar.addView(equalizerToggle, params); AlphaAnimation anim = new AlphaAnimation(0f, 1.0f); anim.setDuration(getResources().getInteger(android.R.integer.config_shortAnimTime)); anim.setInterpolator(getContext(), android.R.anim.decelerate_interpolator); equalizerToggle.startAnimation(anim); } LinearLayout equalizerPanel = (LinearLayout) layout.findViewById(R.id.equalizer_panel); equalizer = generateEqualizerConfig(); int bandCount = (equalizer != null) ? equalizer.getNumberOfBands() : 0; sliders = new EqualizerFrame[bandCount]; PresetAdapter presetAdapter = new PresetAdapter(getActivity(), equalizer, sliders); presetSpinner.setAdapter(presetAdapter); presetSpinner.setSelection(mPrefStore.getEqualizerPresetId() + 1); presetSpinner.setOnItemSelectedListener(presetAdapter); for (short i = 0; i < bandCount; i++) { inflater.inflate(R.layout.instance_eq_slider, equalizerPanel, true); sliders[i] = new EqualizerFrame(equalizerPanel.getChildAt(i), equalizer, i, presetSpinner); } setEqualizerEnabled(mPrefStore.getEqualizerEnabled()); // If this device already has an application that can handle equalizers system-wide, inform // the user of possible issues by using Jockey's built-in equalizer if (Util.getSystemEqIntent(getActivity()) != null) { ((TextView) layout.findViewById(R.id.equalizer_notes)) .setText(R.string.equalizerNoteSystem); } return layout; } private RemoteEqualizer generateEqualizerConfig() { // Obtain an instance of the system equalizer to discover available configuration 
options // for an equalizer including bands and presets. This equalizer is not used to control // audio settings and is released before this method ends Equalizer systemEqualizer = new Equalizer(0, 1); RemoteEqualizer eq = new RemoteEqualizer(systemEqualizer); Equalizer.Settings settings = mPrefStore.getEqualizerSettings(); if (settings != null) { eq.setProperties(mPrefStore.getEqualizerSettings()); } systemEqualizer.release(); return eq; } private void applyEqualizer() { mPrefStore.setEqualizerPresetId((int) presetSpinner.getSelectedItemId()); mPrefStore.setEqualizerSettings(equalizer.getProperties()); mPrefStore.setEqualizerEnabled(equalizerToggle.isChecked()); OldPlayerController.updatePlayerPreferences(mPrefStore); } @Override public void onResume() { super.onResume(); Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar); if (toolbar != null) { toolbar.setTitle(R.string.header_equalizer); } } private void setEqualizerEnabled(boolean enabled) { if (equalizerToggle.isChecked() != enabled) { equalizerToggle.setChecked(enabled); } presetSpinnerPrefix.setEnabled(enabled); presetSpinner.setEnabled(enabled); for (EqualizerFrame f : sliders) { f.update(equalizer.getCurrentPreset() == -1 && enabled); } applyEqualizer(); } @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { setEqualizerEnabled(isChecked); applyEqualizer(); } @Override public void onBackStackChanged() { if (isRemoving()) { final Toolbar toolbar = (Toolbar) getActivity().findViewById(R.id.toolbar); if (toolbar != null) { final int duration = getResources().getInteger(android.R.integer.config_mediumAnimTime); AlphaAnimation anim = new AlphaAnimation(1.0f, 0f); anim.setDuration(duration); anim.setInterpolator(getContext(), android.R.anim.decelerate_interpolator); equalizerToggle.startAnimation(anim); new Handler().postDelayed(() -> toolbar.removeView(equalizerToggle), duration); } applyEqualizer(); } } private class PresetAdapter extends BaseAdapter 
implements AdapterView.OnItemSelectedListener { private Context context; private String[] presets; private EqualizerFrame[] sliders; PresetAdapter(Context context, RemoteEqualizer equalizer, EqualizerFrame[] sliders) { this.context = context; this.sliders = sliders; presets = new String[equalizer.getNumberOfPresets() + 1]; presets[0] = "Custom"; // TODO String resource for (short i = 0; i < presets.length - 1; i++) { presets[i + 1] = equalizer.getPresetName(i); } } @Override public int getCount() { return presets.length; } @Override public Object getItem(int position) { return presets[position]; } @Override public long getItemId(int position) { return position - 1; } @Override public View getView(int position, View convertView, ViewGroup parent) { if (convertView == null) { convertView = LayoutInflater .from(context) .inflate(android.R.layout.simple_spinner_item, parent, false); } TextView textView = (TextView) convertView.findViewById(android.R.id.text1); textView.setText(presets[position]); return convertView; } @Override public View getDropDownView(int position, View convertView, ViewGroup parent) { if (convertView == null) { convertView = LayoutInflater .from(context) .inflate(android.R.layout.simple_spinner_dropdown_item, parent, false); } return getView(position, convertView, parent); } @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { equalizer.usePreset((short) id); applyEqualizer(); for (EqualizerFrame f : sliders) { f.update(id == -1 && equalizerToggle.isChecked()); } } @Override public void onNothingSelected(AdapterView<?> parent) { } } private class EqualizerFrame implements SeekBar.OnSeekBarChangeListener { final short bandNumber; final SeekBar bandSlider; final TextView bandLabel; final Spinner presetSpinner; final int minLevel; final int maxLevel; public EqualizerFrame(View root, RemoteEqualizer eq, short bandNumber, Spinner presetSpinner) { this.bandNumber = bandNumber; this.presetSpinner = 
presetSpinner; bandSlider = (SeekBar) root.findViewById(R.id.eq_slider); bandLabel = (TextView) root.findViewById(R.id.eq_band_name); int frequency = eq.getCenterFreq(bandNumber) / 1000; if (frequency > 1000) { bandLabel.setText(frequency / 1000 + "K"); } else { bandLabel.setText(Integer.toString(frequency)); } int[] range = eq.getBandLevelRange(); minLevel = range[0]; maxLevel = range[1]; bandSlider.setMax(Math.abs(minLevel) + maxLevel); bandSlider.setProgress(eq.getBandLevel(bandNumber) + Math.abs(range[0])); bandSlider.setOnSeekBarChangeListener(this); } public void update(boolean enabled) { bandSlider.setEnabled(enabled); bandLabel.setEnabled(enabled); } public void update(int level) { bandSlider.setProgress(level + Math.abs(minLevel)); } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { equalizer.setBandLevel(bandNumber, (short) (progress - Math.abs(minLevel))); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { presetSpinner.setSelection(0); equalizer.usePreset(-1); applyEqualizer(); } } }
Use injected PlayerController on equalizer settings page
app/src/main/java/com/marverenic/music/fragments/EqualizerFragment.java
Use injected PlayerController on equalizer settings page
Java
apache-2.0
b35ffb306d3cf972c500d746fd313879e57debce
0
tanrabad/survey,nectec-wisru/android-TanlabadSurvey,nectec-wisru/android-TanrabadSurvey
/* * Copyright (c) 2019 NECTEC * National Electronics and Computer Technology Center, Thailand * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.tanrabad.survey.presenter; import android.os.Handler; import android.os.Message; import org.tanrabad.survey.BuildConfig; import org.tanrabad.survey.service.GithubReleaseService; import org.tanrabad.survey.service.json.GithubReleaseJson; public class CheckVersionThread extends Thread { private static final int FOUND_NEWER = 1; private static final int ALREADY_LATEST = 0; private final Handler handler; private boolean pause = false; public CheckVersionThread(CheckVersionListener listener) { handler = new CheckLatestVersionHandler(listener); } @Override public void run() { try { Thread.sleep(1000); } catch (InterruptedException e) { e.printStackTrace(); } GithubReleaseService service = new GithubReleaseService(); GithubReleaseJson json = service.getLatest(); if (json == null) { handler.sendEmptyMessage(ALREADY_LATEST); return; } Version latest = new Version(json.tagName, json.prerelease); if (pause) return; if (latest.isPreRelease) { handler.sendEmptyMessage(ALREADY_LATEST); return; } if (latest.compareTo(new Version(BuildConfig.VERSION_NAME)) > 0) { Message msg = new Message(); msg.what = FOUND_NEWER; msg.obj = latest; handler.sendMessage(msg); } else { handler.sendEmptyMessage(ALREADY_LATEST); } } public void pause() { this.pause = true; } interface CheckVersionListener { void onAlreadyLatest(); void onFoundNewer(Version 
version); } static class CheckLatestVersionHandler extends Handler { private final CheckVersionListener listener; CheckLatestVersionHandler(CheckVersionListener listener) { this.listener = listener; } @Override public void handleMessage(Message msg) { switch (msg.what) { case FOUND_NEWER: listener.onFoundNewer((Version) msg.obj); break; case ALREADY_LATEST: listener.onAlreadyLatest(); break; default: throw new IllegalArgumentException("Not recongnize message"); } } } }
app/src/main/java/org/tanrabad/survey/presenter/CheckVersionThread.java
/* * Copyright (c) 2019 NECTEC * National Electronics and Computer Technology Center, Thailand * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.tanrabad.survey.presenter; import android.os.Handler; import android.os.Message; import org.tanrabad.survey.BuildConfig; import org.tanrabad.survey.service.GithubReleaseService; import org.tanrabad.survey.service.json.GithubReleaseJson; public class CheckVersionThread extends Thread { private static final int FOUND_NEWER = 1; private static final int ALREADY_LATEST = 0; private final Handler handler; private boolean pause = false; public CheckVersionThread(CheckVersionListener listener) { handler = new CheckLatestVersionHandler(listener); } @Override public void run() { try { Thread.sleep(1000); } catch (InterruptedException e) { e.printStackTrace(); } GithubReleaseService service = new GithubReleaseService(); GithubReleaseJson json = service.getLatest(); Version latest = new Version(json.tagName, json.prerelease); if (pause) return; if (latest.isPreRelease) { handler.sendEmptyMessage(ALREADY_LATEST); return; } if (latest.compareTo(new Version(BuildConfig.VERSION_NAME)) > 0) { Message msg = new Message(); msg.what = FOUND_NEWER; msg.obj = latest; handler.sendMessage(msg); } else { handler.sendEmptyMessage(ALREADY_LATEST); } } public void pause() { this.pause = true; } interface CheckVersionListener { void onAlreadyLatest(); void onFoundNewer(Version version); } static class CheckLatestVersionHandler extends Handler { private 
final CheckVersionListener listener; CheckLatestVersionHandler(CheckVersionListener listener) { this.listener = listener; } @Override public void handleMessage(Message msg) { switch (msg.what) { case FOUND_NEWER: listener.onFoundNewer((Version) msg.obj); break; case ALREADY_LATEST: listener.onAlreadyLatest(); break; default: throw new IllegalArgumentException("Not recongnize message"); } } } }
skip check version if not response
app/src/main/java/org/tanrabad/survey/presenter/CheckVersionThread.java
skip check version if not response
Java
apache-2.0
001609d171d7dda3735bd71ce5670492e06baf4d
0
springfox/springfox,springfox/springfox,springfox/springfox,springfox/springfox
/* * * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package springfox.documentation.schema; import com.fasterxml.classmate.ResolvedType; import com.fasterxml.classmate.TypeResolver; import com.google.common.base.Predicate; import com.google.common.collect.FluentIterable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; import springfox.documentation.schema.property.ModelPropertiesProvider; import springfox.documentation.spi.schema.EnumTypeDeterminer; import springfox.documentation.spi.schema.contexts.ModelContext; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import static com.google.common.base.Predicates.*; import static com.google.common.collect.FluentIterable.*; import static com.google.common.collect.Lists.*; import static springfox.documentation.schema.Collections.*; import static springfox.documentation.schema.Maps.*; import static springfox.documentation.schema.ResolvedTypes.*; @Component @Qualifier("default") public class DefaultModelDependencyProvider implements ModelDependencyProvider { private static final Logger LOG = LoggerFactory.getLogger(DefaultModelDependencyProvider.class); private final TypeResolver typeResolver; private final ModelPropertiesProvider propertiesProvider; 
private final TypeNameExtractor nameExtractor; private final EnumTypeDeterminer enumTypeDeterminer; @Autowired public DefaultModelDependencyProvider( TypeResolver typeResolver, @Qualifier("cachedModelProperties") ModelPropertiesProvider propertiesProvider, TypeNameExtractor nameExtractor, EnumTypeDeterminer enumTypeDeterminer) { this.typeResolver = typeResolver; this.propertiesProvider = propertiesProvider; this.nameExtractor = nameExtractor; this.enumTypeDeterminer = enumTypeDeterminer; } @Override public Set<ResolvedType> dependentModels(ModelContext modelContext) { return from(resolvedDependencies(modelContext)) .filter(ignorableTypes(modelContext)) .filter(not(baseTypes(modelContext))) .toSet(); } private Predicate<ResolvedType> baseTypes(final ModelContext modelContext) { return new Predicate<ResolvedType>() { @Override public boolean apply(ResolvedType resolvedType) { return isBaseType(ModelContext.fromParent(modelContext, resolvedType)); } }; } private boolean isBaseType(ModelContext modelContext) { String typeName = nameExtractor.typeName(modelContext); return Types.isBaseType(typeName); } private Predicate<ResolvedType> ignorableTypes(final ModelContext modelContext) { return new Predicate<ResolvedType>() { @Override public boolean apply(ResolvedType input) { return !modelContext.hasSeenBefore(input); } }; } private List<ResolvedType> resolvedDependencies(ModelContext modelContext) { ResolvedType resolvedType = modelContext.alternateFor(modelContext.resolvedType(typeResolver)); if (isBaseType(ModelContext.fromParent(modelContext, resolvedType))) { LOG.debug("Marking base type {} as seen", resolvedType.getSignature()); modelContext.seen(resolvedType); return newArrayList(); } List<ResolvedType> dependencies = newArrayList(resolvedTypeParameters(modelContext, resolvedType)); dependencies.addAll(resolvedArrayElementType(modelContext, resolvedType)); dependencies.addAll(resolvedMapType(modelContext, resolvedType)); 
dependencies.addAll(resolvedPropertiesAndFields(modelContext, resolvedType)); return dependencies; } private Collection<? extends ResolvedType> resolvedMapType(ModelContext modelContext, ResolvedType resolvedType) { ResolvedType mapType = resolvedType.findSupertype(Map.class); if (mapType == null) { return newArrayList(); } return resolvedTypeParameters(modelContext, mapType); } private List<? extends ResolvedType> resolvedArrayElementType(ModelContext modelContext, ResolvedType resolvedType) { List<ResolvedType> parameters = newArrayList(); if (resolvedType.isArray()) { ResolvedType elementType = resolvedType.getArrayElementType(); LOG.debug("Adding type for element {}", elementType.getSignature()); parameters.add(modelContext.alternateFor(elementType)); LOG.debug("Recursively resolving dependencies for element {}", elementType.getSignature()); parameters.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, elementType))); } return parameters; } private List<? extends ResolvedType> resolvedTypeParameters(ModelContext modelContext, ResolvedType resolvedType) { List<ResolvedType> parameters = newArrayList(); for (ResolvedType parameter : resolvedType.getTypeParameters()) { LOG.debug("Adding type for parameter {}", parameter.getSignature()); parameters.add(modelContext.alternateFor(parameter)); LOG.debug("Recursively resolving dependencies for parameter {}", parameter.getSignature()); parameters.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, parameter))); } return parameters; } private List<ResolvedType> resolvedPropertiesAndFields(ModelContext modelContext, ResolvedType resolvedType) { if (modelContext.hasSeenBefore(resolvedType) || enumTypeDeterminer.isEnum(resolvedType.getErasedType())) { return newArrayList(); } modelContext.seen(resolvedType); List<ResolvedType> properties = newArrayList(); for (ModelProperty property : nonTrivialProperties(modelContext, resolvedType)) { LOG.debug("Adding type {} for parameter {}", 
property.getType().getSignature(), property.getName()); if (!isMapType(property.getType())) { properties.add(property.getType()); } properties.addAll(maybeFromCollectionElementType(modelContext, property)); properties.addAll(maybeFromMapValueType(modelContext, property)); properties.addAll(maybeFromRegularType(modelContext, property)); } return properties; } private FluentIterable<ModelProperty> nonTrivialProperties(ModelContext modelContext, ResolvedType resolvedType) { return from(propertiesFor(modelContext, resolvedType)) .filter(not(baseProperty(modelContext))); } private Predicate<? super ModelProperty> baseProperty(final ModelContext modelContext) { return new Predicate<ModelProperty>() { @Override public boolean apply(ModelProperty input) { return isBaseType(ModelContext.fromParent(modelContext, input.getType())); } }; } private List<ResolvedType> maybeFromRegularType(ModelContext modelContext, ModelProperty property) { if (isContainerType(property.getType()) || isMapType(property.getType())) { return newArrayList(); } LOG.debug("Recursively resolving dependencies for type {}", resolvedTypeSignature(property.getType()).or("<null>")); return newArrayList(resolvedDependencies(ModelContext.fromParent(modelContext, property.getType()))); } private List<ResolvedType> maybeFromCollectionElementType(ModelContext modelContext, ModelProperty property) { List<ResolvedType> dependencies = newArrayList(); if (isContainerType(property.getType())) { ResolvedType collectionElementType = collectionElementType(property.getType()); String resolvedTypeSignature = resolvedTypeSignature(collectionElementType).or("<null>"); if (!isBaseType(ModelContext.fromParent(modelContext, collectionElementType))) { LOG.debug("Adding collectionElement type {}", resolvedTypeSignature); dependencies.add(collectionElementType); } LOG.debug("Recursively resolving dependencies for collectionElement type {}", resolvedTypeSignature); 
dependencies.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, collectionElementType))); } return dependencies; } private List<ResolvedType> maybeFromMapValueType(ModelContext modelContext, ModelProperty property) { List<ResolvedType> dependencies = newArrayList(); if (isMapType(property.getType())) { ResolvedType valueType = Maps.mapValueType(property.getType()); String resolvedTypeSignature = resolvedTypeSignature(valueType).or("<null>"); if (!isBaseType(ModelContext.fromParent(modelContext, valueType))) { LOG.debug("Adding value type {}", resolvedTypeSignature); dependencies.add(valueType); } LOG.debug("Recursively resolving dependencies for value type {}", resolvedTypeSignature); dependencies.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, valueType))); } return dependencies; } private List<ModelProperty> propertiesFor(ModelContext modelContext, ResolvedType resolvedType) { return propertiesProvider.propertiesFor(resolvedType, modelContext); } }
springfox-schema/src/main/java/springfox/documentation/schema/DefaultModelDependencyProvider.java
/* * * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package springfox.documentation.schema; import com.fasterxml.classmate.ResolvedType; import com.fasterxml.classmate.TypeResolver; import com.google.common.base.Predicate; import com.google.common.collect.FluentIterable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; import springfox.documentation.schema.property.ModelPropertiesProvider; import springfox.documentation.spi.schema.EnumTypeDeterminer; import springfox.documentation.spi.schema.contexts.ModelContext; import java.util.List; import java.util.Set; import static com.google.common.base.Predicates.*; import static com.google.common.collect.FluentIterable.*; import static com.google.common.collect.Lists.*; import static springfox.documentation.schema.Collections.*; import static springfox.documentation.schema.Maps.*; import static springfox.documentation.schema.ResolvedTypes.*; @Component @Qualifier("default") public class DefaultModelDependencyProvider implements ModelDependencyProvider { private static final Logger LOG = LoggerFactory.getLogger(DefaultModelDependencyProvider.class); private final TypeResolver typeResolver; private final ModelPropertiesProvider propertiesProvider; private final TypeNameExtractor nameExtractor; private 
final EnumTypeDeterminer enumTypeDeterminer; @Autowired public DefaultModelDependencyProvider( TypeResolver typeResolver, @Qualifier("cachedModelProperties") ModelPropertiesProvider propertiesProvider, TypeNameExtractor nameExtractor, EnumTypeDeterminer enumTypeDeterminer) { this.typeResolver = typeResolver; this.propertiesProvider = propertiesProvider; this.nameExtractor = nameExtractor; this.enumTypeDeterminer = enumTypeDeterminer; } @Override public Set<ResolvedType> dependentModels(ModelContext modelContext) { return from(resolvedDependencies(modelContext)) .filter(ignorableTypes(modelContext)) .filter(not(baseTypes(modelContext))) .toSet(); } private Predicate<ResolvedType> baseTypes(final ModelContext modelContext) { return new Predicate<ResolvedType>() { @Override public boolean apply(ResolvedType resolvedType) { return isBaseType(ModelContext.fromParent(modelContext, resolvedType)); } }; } private boolean isBaseType(ModelContext modelContext) { String typeName = nameExtractor.typeName(modelContext); return Types.isBaseType(typeName); } private Predicate<ResolvedType> ignorableTypes(final ModelContext modelContext) { return new Predicate<ResolvedType>() { @Override public boolean apply(ResolvedType input) { return !modelContext.hasSeenBefore(input); } }; } private List<ResolvedType> resolvedDependencies(ModelContext modelContext) { ResolvedType resolvedType = modelContext.alternateFor(modelContext.resolvedType(typeResolver)); if (isBaseType(ModelContext.fromParent(modelContext, resolvedType))) { LOG.debug("Marking base type {} as seen", resolvedType.getSignature()); modelContext.seen(resolvedType); return newArrayList(); } List<ResolvedType> dependencies = newArrayList(resolvedTypeParameters(modelContext, resolvedType)); dependencies.addAll(resolvedArrayElementType(modelContext, resolvedType)); dependencies.addAll(resolvedPropertiesAndFields(modelContext, resolvedType)); return dependencies; } private List<? 
extends ResolvedType> resolvedArrayElementType(ModelContext modelContext, ResolvedType resolvedType) { List<ResolvedType> parameters = newArrayList(); if (resolvedType.isArray()) { ResolvedType elementType = resolvedType.getArrayElementType(); LOG.debug("Adding type for element {}", elementType.getSignature()); parameters.add(modelContext.alternateFor(elementType)); LOG.debug("Recursively resolving dependencies for element {}", elementType.getSignature()); parameters.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, elementType))); } return parameters; } private List<? extends ResolvedType> resolvedTypeParameters(ModelContext modelContext, ResolvedType resolvedType) { List<ResolvedType> parameters = newArrayList(); for (ResolvedType parameter : resolvedType.getTypeParameters()) { LOG.debug("Adding type for parameter {}", parameter.getSignature()); parameters.add(modelContext.alternateFor(parameter)); LOG.debug("Recursively resolving dependencies for parameter {}", parameter.getSignature()); parameters.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, parameter))); } return parameters; } private List<ResolvedType> resolvedPropertiesAndFields(ModelContext modelContext, ResolvedType resolvedType) { if (modelContext.hasSeenBefore(resolvedType) || enumTypeDeterminer.isEnum(resolvedType.getErasedType())) { return newArrayList(); } modelContext.seen(resolvedType); List<ResolvedType> properties = newArrayList(); for (ModelProperty property : nonTrivialProperties(modelContext, resolvedType)) { LOG.debug("Adding type {} for parameter {}", property.getType().getSignature(), property.getName()); if (!isMapType(property.getType())) { properties.add(property.getType()); } properties.addAll(maybeFromCollectionElementType(modelContext, property)); properties.addAll(maybeFromMapValueType(modelContext, property)); properties.addAll(maybeFromRegularType(modelContext, property)); } return properties; } private FluentIterable<ModelProperty> 
nonTrivialProperties(ModelContext modelContext, ResolvedType resolvedType) { return from(propertiesFor(modelContext, resolvedType)) .filter(not(baseProperty(modelContext))); } private Predicate<? super ModelProperty> baseProperty(final ModelContext modelContext) { return new Predicate<ModelProperty>() { @Override public boolean apply(ModelProperty input) { return isBaseType(ModelContext.fromParent(modelContext, input.getType())); } }; } private List<ResolvedType> maybeFromRegularType(ModelContext modelContext, ModelProperty property) { if (isContainerType(property.getType()) || isMapType(property.getType())) { return newArrayList(); } LOG.debug("Recursively resolving dependencies for type {}", resolvedTypeSignature(property.getType()).or("<null>")); return newArrayList(resolvedDependencies(ModelContext.fromParent(modelContext, property.getType()))); } private List<ResolvedType> maybeFromCollectionElementType(ModelContext modelContext, ModelProperty property) { List<ResolvedType> dependencies = newArrayList(); if (isContainerType(property.getType())) { ResolvedType collectionElementType = collectionElementType(property.getType()); String resolvedTypeSignature = resolvedTypeSignature(collectionElementType).or("<null>"); if (!isBaseType(ModelContext.fromParent(modelContext, collectionElementType))) { LOG.debug("Adding collectionElement type {}", resolvedTypeSignature); dependencies.add(collectionElementType); } LOG.debug("Recursively resolving dependencies for collectionElement type {}", resolvedTypeSignature); dependencies.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, collectionElementType))); } return dependencies; } private List<ResolvedType> maybeFromMapValueType(ModelContext modelContext, ModelProperty property) { List<ResolvedType> dependencies = newArrayList(); if (isMapType(property.getType())) { ResolvedType valueType = Maps.mapValueType(property.getType()); String resolvedTypeSignature = resolvedTypeSignature(valueType).or("<null>"); if 
(!isBaseType(ModelContext.fromParent(modelContext, valueType))) { LOG.debug("Adding value type {}", resolvedTypeSignature); dependencies.add(valueType); } LOG.debug("Recursively resolving dependencies for value type {}", resolvedTypeSignature); dependencies.addAll(resolvedDependencies(ModelContext.fromParent(modelContext, valueType))); } return dependencies; } private List<ModelProperty> propertiesFor(ModelContext modelContext, ResolvedType resolvedType) { return propertiesProvider.propertiesFor(resolvedType, modelContext); } }
Added support for resolving Map types specifically Because HashMaps do not have getters/setters that determine key or value types, and since closed generic types do not have type parameters, this lead to the bug that certain types in nested Map types would not show up in the model definitions. (2203)
springfox-schema/src/main/java/springfox/documentation/schema/DefaultModelDependencyProvider.java
Added support for resolving Map types specifically
Java
apache-2.0
e940c09fcf73fd68cb5f3a715362a481ef7cbb2f
0
material-components/material-components-android
/* * Copyright 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.material.catalog.tabs; import io.material.catalog.R; import android.content.res.TypedArray; import android.os.Bundle; import androidx.core.view.ViewCompat; import androidx.appcompat.widget.SwitchCompat; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ArrayAdapter; import android.widget.RadioButton; import android.widget.Spinner; import androidx.annotation.ArrayRes; import androidx.annotation.DrawableRes; import androidx.annotation.LayoutRes; import androidx.annotation.Nullable; import androidx.annotation.StringRes; import androidx.coordinatorlayout.widget.CoordinatorLayout; import androidx.viewpager.widget.ViewPager; import com.google.android.material.badge.BadgeDrawable; import com.google.android.material.tabs.TabLayout; import com.google.android.material.tabs.TabLayout.LabelVisibility; import com.google.android.material.tabs.TabLayout.OnTabSelectedListener; import com.google.android.material.tabs.TabLayout.Tab; import io.material.catalog.feature.DemoFragment; import io.material.catalog.feature.DemoUtils; import java.util.List; /** The main fragment that displays tabs demos for the Catalog app. 
*/ public class TabsControllableDemoFragment extends DemoFragment { private static final int TAB_COUNT = 3; @DrawableRes private static final int ICON_DRAWABLE_RES = R.drawable.ic_tabs_24px; @StringRes private static final int LABEL_STRING_RES = R.string.cat_tab_item_label; private boolean showIcons = true; private boolean showLabels = true; private List<TabLayout> tabLayouts; private ViewPager pager; @Nullable @Override public View onCreateDemoView( LayoutInflater layoutInflater, @Nullable ViewGroup viewGroup, @Nullable Bundle bundle) { View view = layoutInflater.inflate( R.layout.cat_tabs_controllable_fragment, viewGroup, false /* attachToRoot */); ViewGroup content = view.findViewById(R.id.content); View tabsContent = layoutInflater.inflate(getTabsContent(), content, false /* attachToRoot */); content.addView(tabsContent, 0); tabLayouts = DemoUtils.findViewsWithType(view, TabLayout.class); pager = view.findViewById(R.id.viewpager); CoordinatorLayout coordinatorLayout = view.findViewById(R.id.coordinator_layout); ViewCompat.setOnApplyWindowInsetsListener( view, (v, insetsCompat) -> { View scrollable = coordinatorLayout.findViewById(R.id.cat_tabs_controllable_scrollview); scrollable.setPadding( scrollable.getPaddingLeft(), 0, scrollable.getPaddingRight(), scrollable.getPaddingBottom()); return insetsCompat; }); setupViewPager(); setAllTabLayoutIcons(ICON_DRAWABLE_RES); setAllTabLayoutText(LABEL_STRING_RES); setAllTabLayoutBadges(); SwitchCompat iconsToggle = view.findViewById(R.id.toggle_icons_switch); iconsToggle.setOnCheckedChangeListener( (buttonView, isChecked) -> { showIcons = isChecked; setAllTabLayoutIcons(ICON_DRAWABLE_RES); }); SwitchCompat labelsToggle = view.findViewById(R.id.toggle_labels_switch); labelsToggle.setOnCheckedChangeListener( (buttonView, isChecked) -> { showLabels = isChecked; if (isChecked) { for (TabLayout tabLayout : tabLayouts) { setLabelVisibility(tabLayout, TabLayout.TAB_LABEL_VISIBILITY_LABELED); } } else { for (TabLayout tabLayout 
: tabLayouts) { setLabelVisibility(tabLayout, TabLayout.TAB_LABEL_VISIBILITY_UNLABELED); } } }); RadioButton tabGravityFillButton = view.findViewById(R.id.tabs_gravity_fill_button); tabGravityFillButton.setOnClickListener(v -> setAllTabLayoutGravity(TabLayout.GRAVITY_FILL)); RadioButton tabGravityCenterButton = view.findViewById(R.id.tabs_gravity_center_button); tabGravityCenterButton.setOnClickListener( v -> setAllTabLayoutGravity(TabLayout.GRAVITY_CENTER)); SwitchCompat inlineToggle = view.findViewById(R.id.toggle_inline_switch); inlineToggle.setOnCheckedChangeListener( (buttonView, isChecked) -> setAllTabLayoutInline(isChecked)); Spinner selectedIndicatorSpinner = (Spinner) view.findViewById(R.id.selector_spinner); ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource( selectedIndicatorSpinner.getContext(), getSelectedIndicatorDrawableTitles(), android.R.layout.simple_spinner_item); adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); selectedIndicatorSpinner.setAdapter(adapter); selectedIndicatorSpinner.setOnItemSelectedListener( new OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { setAllTabLayoutSelectedIndicators(position); } @Override public void onNothingSelected(AdapterView<?> parent) { setAllTabLayoutSelectedIndicators(0); } }); return view; } @ArrayRes protected int getSelectedIndicatorDrawableTitles() { return R.array.cat_tabs_selected_indicator_drawable_titles; } @ArrayRes protected int getSelectedIndicatorDrawables() { return R.array.cat_tabs_selected_indicator_drawables; } @ArrayRes protected int getSelectedIndicatorDrawableGravities() { return R.array.cat_tabs_selected_indicator_drawable_gravities; } @LayoutRes protected int getTabsContent() { return R.layout.cat_tabs_controllable_content; } private void setupViewPager() { pager.setAdapter(new TabsPagerAdapter(getChildFragmentManager(), getContext(), TAB_COUNT)); for (TabLayout 
tabLayout : tabLayouts) { tabLayout.setupWithViewPager(pager); } } private void setAllTabLayoutIcons(@DrawableRes int iconResId) { for (TabLayout tabLayout : tabLayouts) { setTabLayoutIcons(tabLayout, iconResId); } } private void setTabLayoutIcons(TabLayout tabLayout, @DrawableRes int iconResId) { for (int i = 0; i < tabLayout.getTabCount(); i++) { if (showIcons) { tabLayout.getTabAt(i).setIcon(iconResId); } else { tabLayout.getTabAt(i).setIcon(null); } } } private void setAllTabLayoutText(@StringRes int stringResId) { for (TabLayout tabLayout : tabLayouts) { setTabLayoutText(tabLayout, stringResId); } } private void setTabLayoutText(TabLayout tabLayout, @StringRes int stringResId) { for (int i = 0; i < tabLayout.getTabCount(); i++) { // Convert tab index (zero-based) to readable tab label starting at 1. tabLayout.getTabAt(i).setText(getResources().getString(stringResId, i + 1)); } } private void setAllTabLayoutBadges() { for (TabLayout tabLayout : tabLayouts) { setupBadging(tabLayout); tabLayout.addOnTabSelectedListener( new OnTabSelectedListener() { @Override public void onTabSelected(Tab tab) { tab.removeBadge(); } @Override public void onTabUnselected(Tab tab) {} @Override public void onTabReselected(Tab tab) { tab.removeBadge(); } }); } } private void setupBadging(TabLayout tabLayout) { BadgeDrawable badgeDrawable = tabLayout.getTabAt(0).getOrCreateBadge(); badgeDrawable.setVisible(true); badgeDrawable.setNumber(1); badgeDrawable = tabLayout.getTabAt(1).getOrCreateBadge(); badgeDrawable.setVisible(true); badgeDrawable.setNumber(88); badgeDrawable = tabLayout.getTabAt(2).getOrCreateBadge(); badgeDrawable.setVisible(true); badgeDrawable.setNumber(999); } private void setLabelVisibility(TabLayout tabLayout, @LabelVisibility int mode) { for (int i = 0; i < tabLayout.getTabCount(); i++) { tabLayout.getTabAt(i).setTabLabelVisibility(mode); } } private void setAllTabLayoutGravity(int gravity) { for (TabLayout tabLayout : tabLayouts) { 
tabLayout.setTabGravity(gravity); } } private void setAllTabLayoutInline(boolean inline) { for (TabLayout tabLayout : tabLayouts) { tabLayout.setInlineLabel(inline); } } private void setAllTabLayoutSelectedIndicators(int position) { TypedArray drawables = getResources().obtainTypedArray(getSelectedIndicatorDrawables()); @DrawableRes int drawableResId = drawables.getResourceId(position, 0); drawables.recycle(); TypedArray drawableGravities = getResources().obtainTypedArray(getSelectedIndicatorDrawableGravities()); int drawableGravity = drawableGravities.getInt(position, 0); drawableGravities.recycle(); for (TabLayout tabLayout : tabLayouts) { tabLayout.setSelectedTabIndicator(drawableResId); tabLayout.setSelectedTabIndicatorGravity(drawableGravity); } } }
catalog/java/io/material/catalog/tabs/TabsControllableDemoFragment.java
/* * Copyright 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.material.catalog.tabs; import io.material.catalog.R; import android.content.res.TypedArray; import android.os.Bundle; import androidx.core.view.ViewCompat; import androidx.appcompat.widget.SwitchCompat; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ArrayAdapter; import android.widget.RadioButton; import android.widget.Spinner; import androidx.annotation.ArrayRes; import androidx.annotation.DrawableRes; import androidx.annotation.LayoutRes; import androidx.annotation.Nullable; import androidx.annotation.StringRes; import androidx.coordinatorlayout.widget.CoordinatorLayout; import androidx.viewpager.widget.ViewPager; import com.google.android.material.badge.BadgeDrawable; import com.google.android.material.tabs.TabLayout; import com.google.android.material.tabs.TabLayout.LabelVisibility; import com.google.android.material.tabs.TabLayout.OnTabSelectedListener; import com.google.android.material.tabs.TabLayout.Tab; import io.material.catalog.feature.DemoFragment; import io.material.catalog.feature.DemoUtils; import java.util.List; /** The main fragment that displays tabs demos for the Catalog app. 
*/ public class TabsControllableDemoFragment extends DemoFragment { private static final int TAB_COUNT = 3; @DrawableRes private static final int ICON_DRAWABLE_RES = R.drawable.ic_tabs_24px; @StringRes private static final int LABEL_STRING_RES = R.string.cat_tab_item_label; private boolean showIcons = true; private boolean showLabels = true; private List<TabLayout> tabLayouts; private ViewPager pager; @Nullable @Override public View onCreateDemoView( LayoutInflater layoutInflater, @Nullable ViewGroup viewGroup, @Nullable Bundle bundle) { View view = layoutInflater.inflate( R.layout.cat_tabs_controllable_fragment, viewGroup, false /* attachToRoot */); ViewGroup content = view.findViewById(R.id.content); View tabsContent = layoutInflater.inflate(getTabsContent(), content, false /* attachToRoot */); content.addView(tabsContent, 0); tabLayouts = DemoUtils.findViewsWithType(view, TabLayout.class); pager = view.findViewById(R.id.viewpager); CoordinatorLayout coordinatorLayout = view.findViewById(R.id.coordinator_layout); ViewCompat.setOnApplyWindowInsetsListener( view, (v, insetsCompat) -> { View scrollable = coordinatorLayout.findViewById(R.id.cat_tabs_controllable_scrollview); scrollable.setPadding( scrollable.getPaddingLeft(), 0, scrollable.getPaddingRight(), scrollable.getPaddingBottom()); return insetsCompat; }); setupViewPager(); setAllTabLayoutIcons(ICON_DRAWABLE_RES); setAllTabLayoutText(LABEL_STRING_RES); setAllTabLayoutBadges(); SwitchCompat iconsToggle = view.findViewById(R.id.toggle_icons_switch); iconsToggle.setOnCheckedChangeListener( (buttonView, isChecked) -> { showIcons = isChecked; setAllTabLayoutIcons(ICON_DRAWABLE_RES); }); SwitchCompat labelsToggle = view.findViewById(R.id.toggle_labels_switch); labelsToggle.setOnCheckedChangeListener( (buttonView, isChecked) -> { showLabels = isChecked; if (isChecked) { for (TabLayout tabLayout : tabLayouts) { setLabelVisibility(tabLayout, TabLayout.TAB_LABEL_VISIBILITY_LABELED); } } else { for (TabLayout tabLayout 
: tabLayouts) { setLabelVisibility(tabLayout, TabLayout.TAB_LABEL_VISIBILITY_UNLABELED); } } }); RadioButton tabGravityFillButton = view.findViewById(R.id.tabs_gravity_fill_button); tabGravityFillButton.setOnClickListener(v -> setAllTabLayoutGravity(TabLayout.GRAVITY_FILL)); RadioButton tabGravityCenterButton = view.findViewById(R.id.tabs_gravity_center_button); tabGravityCenterButton.setOnClickListener( v -> setAllTabLayoutGravity(TabLayout.GRAVITY_CENTER)); SwitchCompat inlineToggle = view.findViewById(R.id.toggle_inline_switch); inlineToggle.setOnCheckedChangeListener( (buttonView, isChecked) -> setAllTabLayoutInline(isChecked)); Spinner selectedIndicatorSpinner = (Spinner) view.findViewById(R.id.selector_spinner); ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource( selectedIndicatorSpinner.getContext(), getSelectedIndicatorDrawableTitles(), android.R.layout.simple_spinner_item); adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); selectedIndicatorSpinner.setAdapter(adapter); selectedIndicatorSpinner.setOnItemSelectedListener( new OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { setAllTabLayoutSelectedIndicators(position); } @Override public void onNothingSelected(AdapterView<?> parent) { setAllTabLayoutSelectedIndicators(0); } }); return view; } @ArrayRes protected int getSelectedIndicatorDrawableTitles() { return R.array.cat_tabs_selected_indicator_drawable_titles; } @ArrayRes protected int getSelectedIndicatorDrawables() { return R.array.cat_tabs_selected_indicator_drawables; } @ArrayRes protected int getSelectedIndicatorDrawableGravities() { return R.array.cat_tabs_selected_indicator_drawable_gravities; } @LayoutRes protected int getTabsContent() { return R.layout.cat_tabs_controllable_content; } private void setupViewPager() { pager.setAdapter(new TabsPagerAdapter(getFragmentManager(), getContext(), TAB_COUNT)); for (TabLayout tabLayout : 
tabLayouts) { tabLayout.setupWithViewPager(pager); } } private void setAllTabLayoutIcons(@DrawableRes int iconResId) { for (TabLayout tabLayout : tabLayouts) { setTabLayoutIcons(tabLayout, iconResId); } } private void setTabLayoutIcons(TabLayout tabLayout, @DrawableRes int iconResId) { for (int i = 0; i < tabLayout.getTabCount(); i++) { if (showIcons) { tabLayout.getTabAt(i).setIcon(iconResId); } else { tabLayout.getTabAt(i).setIcon(null); } } } private void setAllTabLayoutText(@StringRes int stringResId) { for (TabLayout tabLayout : tabLayouts) { setTabLayoutText(tabLayout, stringResId); } } private void setTabLayoutText(TabLayout tabLayout, @StringRes int stringResId) { for (int i = 0; i < tabLayout.getTabCount(); i++) { // Convert tab index (zero-based) to readable tab label starting at 1. tabLayout.getTabAt(i).setText(getResources().getString(stringResId, i + 1)); } } private void setAllTabLayoutBadges() { for (TabLayout tabLayout : tabLayouts) { setupBadging(tabLayout); tabLayout.addOnTabSelectedListener( new OnTabSelectedListener() { @Override public void onTabSelected(Tab tab) { tab.removeBadge(); } @Override public void onTabUnselected(Tab tab) {} @Override public void onTabReselected(Tab tab) { tab.removeBadge(); } }); } } private void setupBadging(TabLayout tabLayout) { BadgeDrawable badgeDrawable = tabLayout.getTabAt(0).getOrCreateBadge(); badgeDrawable.setVisible(true); badgeDrawable.setNumber(1); badgeDrawable = tabLayout.getTabAt(1).getOrCreateBadge(); badgeDrawable.setVisible(true); badgeDrawable.setNumber(88); badgeDrawable = tabLayout.getTabAt(2).getOrCreateBadge(); badgeDrawable.setVisible(true); badgeDrawable.setNumber(999); } private void setLabelVisibility(TabLayout tabLayout, @LabelVisibility int mode) { for (int i = 0; i < tabLayout.getTabCount(); i++) { tabLayout.getTabAt(i).setTabLabelVisibility(mode); } } private void setAllTabLayoutGravity(int gravity) { for (TabLayout tabLayout : tabLayouts) { tabLayout.setTabGravity(gravity); } } 
private void setAllTabLayoutInline(boolean inline) { for (TabLayout tabLayout : tabLayouts) { tabLayout.setInlineLabel(inline); } } private void setAllTabLayoutSelectedIndicators(int position) { TypedArray drawables = getResources().obtainTypedArray(getSelectedIndicatorDrawables()); @DrawableRes int drawableResId = drawables.getResourceId(position, 0); drawables.recycle(); TypedArray drawableGravities = getResources().obtainTypedArray(getSelectedIndicatorDrawableGravities()); int drawableGravity = drawableGravities.getInt(position, 0); drawableGravities.recycle(); for (TabLayout tabLayout : tabLayouts) { tabLayout.setSelectedTabIndicator(drawableResId); tabLayout.setSelectedTabIndicatorGravity(drawableGravity); } } }
[Catalog] Fix the wrong use of FragmentManager in TabsControllableDemoFragment Resolves https://github.com/material-components/material-components-android/issues/1553 Resolves https://github.com/material-components/material-components-android/pull/1554 GIT_ORIGIN_REV_ID=4c7c1ab154892c665b4a67215a88e781da218ab6 PiperOrigin-RevId: 324207016
catalog/java/io/material/catalog/tabs/TabsControllableDemoFragment.java
[Catalog] Fix the wrong use of FragmentManager in TabsControllableDemoFragment
Java
apache-2.0
4bd6caa372999a85a17781b2e3dd1446de0092ba
0
akrherz/Openfire,GregDThomas/Openfire,GregDThomas/Openfire,Gugli/Openfire,akrherz/Openfire,speedy01/Openfire,igniterealtime/Openfire,GregDThomas/Openfire,guusdk/Openfire,GregDThomas/Openfire,speedy01/Openfire,magnetsystems/message-openfire,akrherz/Openfire,igniterealtime/Openfire,speedy01/Openfire,igniterealtime/Openfire,guusdk/Openfire,GregDThomas/Openfire,igniterealtime/Openfire,magnetsystems/message-openfire,magnetsystems/message-openfire,Gugli/Openfire,Gugli/Openfire,speedy01/Openfire,igniterealtime/Openfire,guusdk/Openfire,speedy01/Openfire,akrherz/Openfire,Gugli/Openfire,Gugli/Openfire,guusdk/Openfire,akrherz/Openfire,magnetsystems/message-openfire,magnetsystems/message-openfire,guusdk/Openfire
/** * $Revision$ * $Date$ * * Copyright (C) 2006 Jive Software. All rights reserved. * * This software is published under the terms of the GNU Public License (GPL), * a copy of which is included in this distribution. */ package org.jivesoftware.wildfire.gateway.protocols.yahoo; import org.jivesoftware.util.Log; import org.jivesoftware.util.LocaleUtils; import org.jivesoftware.wildfire.gateway.*; import org.xmpp.packet.JID; import org.xmpp.packet.Presence; import ymsg.network.StatusConstants; /** * Yahoo Transport Interface. * * This handles the bulk of the XMPP work via BaseTransport and provides * some gateway specific interactions. * * @author Daniel Henninger */ public class YahooTransport extends BaseTransport { /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyUsername() */ public String getTerminologyUsername() { return LocaleUtils.getLocalizedString("gateway.yahoo.username", "gateway"); } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyPassword() */ public String getTerminologyPassword() { return LocaleUtils.getLocalizedString("gateway.yahoo.password", "gateway"); } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyNickname() */ public String getTerminologyNickname() { return null; } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyRegistration() */ public String getTerminologyRegistration() { return LocaleUtils.getLocalizedString("gateway.yahoo.registration", "gateway"); } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#isPasswordRequired() */ public Boolean isPasswordRequired() { return true; } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#isNicknameRequired() */ public Boolean isNicknameRequired() { return false; } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#isUsernameValid(String) */ public Boolean isUsernameValid(String username) { return username.matches("[\\w\\.]+"); } /** * Handles creating a Yahoo session and 
triggering a login. * * @param registration Registration information to be used to log in. * @param jid JID that is logged into the transport. * @param presenceType Type of presence. * @param verboseStatus Longer status description. */ public TransportSession registrationLoggedIn(Registration registration, JID jid, PresenceType presenceType, String verboseStatus, Integer priority) { Log.debug("Logging in to Yahoo gateway."); TransportSession session = new YahooSession(registration, jid, this, priority); this.getSessionManager().startThread(session); ((YahooSession)session).logIn(presenceType, verboseStatus); return session; } /** * Handles logging out of a Yahoo session. * * @param session The session to be disconnected. */ public void registrationLoggedOut(TransportSession session) { Log.debug("Logging out of Yahoo gateway."); ((YahooSession)session).logOut(); session.sessionDone(); // Just in case. session.setLoginStatus(TransportLoginStatus.LOGGED_OUT); } /** * Converts a jabber status to an Yahoo status. * * @param jabStatus Jabber presence type. * @return Yahoo status identifier. */ public long convertJabStatusToYahoo(PresenceType jabStatus) { if (jabStatus == PresenceType.available) { return StatusConstants.STATUS_AVAILABLE; } else if (jabStatus == PresenceType.away) { return StatusConstants.STATUS_BRB; } else if (jabStatus == PresenceType.xa) { return StatusConstants.STATUS_STEPPEDOUT; } else if (jabStatus == PresenceType.dnd) { return StatusConstants.STATUS_BUSY; } else if (jabStatus == PresenceType.chat) { return StatusConstants.STATUS_AVAILABLE; } else if (jabStatus == PresenceType.unavailable) { return StatusConstants.STATUS_OFFLINE; } else { return StatusConstants.STATUS_AVAILABLE; } } /** * Sets up a presence packet according to Yahoo status. * * @param packet Presence packet to be set up. * @param yahooStatus Yahoo StatusConstants constant. 
*/ public void setUpPresencePacket(Presence packet, long yahooStatus) { if (yahooStatus == StatusConstants.STATUS_AVAILABLE) { // We're good, leave the type as blank for available. } else if (yahooStatus == StatusConstants.STATUS_BRB) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_BUSY) { packet.setShow(Presence.Show.dnd); } else if (yahooStatus == StatusConstants.STATUS_IDLE) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_OFFLINE) { packet.setType(Presence.Type.unavailable); } else if (yahooStatus == StatusConstants.STATUS_NOTATDESK) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_NOTINOFFICE) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_ONPHONE) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_ONVACATION) { packet.setShow(Presence.Show.xa); } else if (yahooStatus == StatusConstants.STATUS_OUTTOLUNCH) { packet.setShow(Presence.Show.xa); } else if (yahooStatus == StatusConstants.STATUS_STEPPEDOUT) { packet.setShow(Presence.Show.away); } else { // Not something we handle, we're going to ignore it. } } }
src/plugins/gateway/src/java/org/jivesoftware/wildfire/gateway/protocols/yahoo/YahooTransport.java
/** * $Revision$ * $Date$ * * Copyright (C) 2006 Jive Software. All rights reserved. * * This software is published under the terms of the GNU Public License (GPL), * a copy of which is included in this distribution. */ package org.jivesoftware.wildfire.gateway.protocols.yahoo; import org.jivesoftware.util.Log; import org.jivesoftware.util.LocaleUtils; import org.jivesoftware.wildfire.gateway.*; import org.xmpp.packet.JID; import org.xmpp.packet.Presence; import ymsg.network.StatusConstants; /** * Yahoo Transport Interface. * * This handles the bulk of the XMPP work via BaseTransport and provides * some gateway specific interactions. * * @author Daniel Henninger */ public class YahooTransport extends BaseTransport { /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyUsername() */ public String getTerminologyUsername() { return LocaleUtils.getLocalizedString("gateway.yahoo.username", "gateway"); } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyPassword() */ public String getTerminologyPassword() { return LocaleUtils.getLocalizedString("gateway.yahoo.password", "gateway"); } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyNickname() */ public String getTerminologyNickname() { return null; } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#getTerminologyRegistration() */ public String getTerminologyRegistration() { return LocaleUtils.getLocalizedString("gateway.yahoo.registration", "gateway"); } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#isPasswordRequired() */ public Boolean isPasswordRequired() { return true; } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#isNicknameRequired() */ public Boolean isNicknameRequired() { return false; } /** * @see org.jivesoftware.wildfire.gateway.BaseTransport#isUsernameValid(String) */ public Boolean isUsernameValid(String username) { return username.matches("\\w+"); } /** * Handles creating a Yahoo session and triggering a 
login. * * @param registration Registration information to be used to log in. * @param jid JID that is logged into the transport. * @param presenceType Type of presence. * @param verboseStatus Longer status description. */ public TransportSession registrationLoggedIn(Registration registration, JID jid, PresenceType presenceType, String verboseStatus, Integer priority) { Log.debug("Logging in to Yahoo gateway."); TransportSession session = new YahooSession(registration, jid, this, priority); this.getSessionManager().startThread(session); ((YahooSession)session).logIn(presenceType, verboseStatus); return session; } /** * Handles logging out of a Yahoo session. * * @param session The session to be disconnected. */ public void registrationLoggedOut(TransportSession session) { Log.debug("Logging out of Yahoo gateway."); ((YahooSession)session).logOut(); session.sessionDone(); // Just in case. session.setLoginStatus(TransportLoginStatus.LOGGED_OUT); } /** * Converts a jabber status to an Yahoo status. * * @param jabStatus Jabber presence type. * @return Yahoo status identifier. */ public long convertJabStatusToYahoo(PresenceType jabStatus) { if (jabStatus == PresenceType.available) { return StatusConstants.STATUS_AVAILABLE; } else if (jabStatus == PresenceType.away) { return StatusConstants.STATUS_BRB; } else if (jabStatus == PresenceType.xa) { return StatusConstants.STATUS_STEPPEDOUT; } else if (jabStatus == PresenceType.dnd) { return StatusConstants.STATUS_BUSY; } else if (jabStatus == PresenceType.chat) { return StatusConstants.STATUS_AVAILABLE; } else if (jabStatus == PresenceType.unavailable) { return StatusConstants.STATUS_OFFLINE; } else { return StatusConstants.STATUS_AVAILABLE; } } /** * Sets up a presence packet according to Yahoo status. * * @param packet Presence packet to be set up. * @param yahooStatus Yahoo StatusConstants constant. 
*/ public void setUpPresencePacket(Presence packet, long yahooStatus) { if (yahooStatus == StatusConstants.STATUS_AVAILABLE) { // We're good, leave the type as blank for available. } else if (yahooStatus == StatusConstants.STATUS_BRB) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_BUSY) { packet.setShow(Presence.Show.dnd); } else if (yahooStatus == StatusConstants.STATUS_IDLE) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_OFFLINE) { packet.setType(Presence.Type.unavailable); } else if (yahooStatus == StatusConstants.STATUS_NOTATDESK) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_NOTINOFFICE) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_ONPHONE) { packet.setShow(Presence.Show.away); } else if (yahooStatus == StatusConstants.STATUS_ONVACATION) { packet.setShow(Presence.Show.xa); } else if (yahooStatus == StatusConstants.STATUS_OUTTOLUNCH) { packet.setShow(Presence.Show.xa); } else if (yahooStatus == StatusConstants.STATUS_STEPPEDOUT) { packet.setShow(Presence.Show.away); } else { // Not something we handle, we're going to ignore it. } } }
[GATE-157] Added ability to have yahoo usernames with .'s in them. git-svn-id: 4206c2c2bb40b5782672a0d03c2c381094954de9@7348 b35dd754-fafc-0310-a699-88a17e54d16e
src/plugins/gateway/src/java/org/jivesoftware/wildfire/gateway/protocols/yahoo/YahooTransport.java
[GATE-157] Added ability to have yahoo usernames with .'s in them.
Java
apache-2.0
8c4f970e6efc93679db125a9adafcfb7a3fa7984
0
OpenUniversity/ovirt-engine,OpenUniversity/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine,halober/ovirt-engine,halober/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine,halober/ovirt-engine,zerodengxinchao/ovirt-engine,eayun/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine,yingyun001/ovirt-engine,yapengsong/ovirt-engine,halober/ovirt-engine,yingyun001/ovirt-engine,yapengsong/ovirt-engine,yapengsong/ovirt-engine,zerodengxinchao/ovirt-engine,eayun/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine
package org.ovirt.engine.core.bll.snapshots; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.ovirt.engine.core.bll.ImagesHandler; import org.ovirt.engine.core.bll.context.CompensationContext; import org.ovirt.engine.core.bll.memory.MemoryUtils; import org.ovirt.engine.core.bll.network.VmInterfaceManager; import org.ovirt.engine.core.bll.network.vm.VnicProfileHelper; import org.ovirt.engine.core.bll.utils.ClusterUtils; import org.ovirt.engine.core.bll.utils.VmDeviceUtils; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.businessentities.DbUser; import org.ovirt.engine.core.common.businessentities.Disk; import org.ovirt.engine.core.common.businessentities.DiskImage; import org.ovirt.engine.core.common.businessentities.ImageStatus; import org.ovirt.engine.core.common.businessentities.Quota; import org.ovirt.engine.core.common.businessentities.Snapshot; import org.ovirt.engine.core.common.businessentities.Snapshot.SnapshotStatus; import org.ovirt.engine.core.common.businessentities.Snapshot.SnapshotType; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.VM; import org.ovirt.engine.core.common.businessentities.VmDevice; import org.ovirt.engine.core.common.businessentities.VmDeviceGeneralType; import org.ovirt.engine.core.common.businessentities.VmStatic; import org.ovirt.engine.core.common.businessentities.VmTemplate; import org.ovirt.engine.core.common.businessentities.image_storage_domain_map; import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface; import org.ovirt.engine.core.common.utils.VmDeviceType; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.compat.Version; import org.ovirt.engine.core.dal.dbbroker.DbFacade; import org.ovirt.engine.core.dao.BaseDiskDao; import org.ovirt.engine.core.dao.DiskDao; import 
org.ovirt.engine.core.dao.DiskImageDAO; import org.ovirt.engine.core.dao.QuotaDAO; import org.ovirt.engine.core.dao.SnapshotDao; import org.ovirt.engine.core.dao.VdsGroupDAO; import org.ovirt.engine.core.dao.VmDeviceDAO; import org.ovirt.engine.core.dao.VmDynamicDAO; import org.ovirt.engine.core.dao.VmStaticDAO; import org.ovirt.engine.core.dao.VmTemplateDAO; import org.ovirt.engine.core.dao.network.VmNetworkInterfaceDao; import org.ovirt.engine.core.utils.log.Log; import org.ovirt.engine.core.utils.log.LogFactory; import org.ovirt.engine.core.utils.ovf.OvfManager; import org.ovirt.engine.core.utils.ovf.OvfReaderException; import org.ovirt.engine.core.utils.ovf.VMStaticOvfLogHandler; /** * The {@link Snapshot} manager is used to easily add/update/remove snapshots. */ public class SnapshotsManager { private final static Log log = LogFactory.getLog(SnapshotsManager.class); /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created in status {@link SnapshotStatus#OK} by default. * * @see #addActiveSnapshot(Guid, VM, SnapshotStatus, CompensationContext) * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. * @param compensationContext * Context for saving compensation details. * @return the newly created snapshot */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, final CompensationContext compensationContext) { return addActiveSnapshot(snapshotId, vm, SnapshotStatus.OK, "", compensationContext); } /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created in status {@link SnapshotStatus#OK} by default. * * @see #addActiveSnapshot(Guid, VM, SnapshotStatus, CompensationContext) * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. * @param snapshotStatus * The initial status of the created snapshot * @param compensationContext * Context for saving compensation details. 
* @return the newly created snapshot */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, SnapshotStatus snapshotStatus, final CompensationContext compensationContext) { return addActiveSnapshot(snapshotId, vm, snapshotStatus, "", compensationContext); } /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created in status {@link SnapshotStatus#OK} by default. * * @see #addActiveSnapshot(Guid, VM, SnapshotStatus, CompensationContext) * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. * @param memoryVolume * The memory state for the created snapshot * @param compensationContext * Context for saving compensation details. * @return the newly created snapshot */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, String memoryVolume, final CompensationContext compensationContext) { return addActiveSnapshot(snapshotId, vm, SnapshotStatus.OK, memoryVolume, compensationContext); } /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created with the given status {@link SnapshotStatus}. * * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. * @param snapshotStatus * The initial status of the snapshot * @param compensationContext * Context for saving compensation details. */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, SnapshotStatus snapshotStatus, String memoryVolume, final CompensationContext compensationContext) { return addSnapshot(snapshotId, "Active VM", snapshotStatus, SnapshotType.ACTIVE, vm, false, memoryVolume, compensationContext); } /** * Add a new snapshot, saving it to the DB (with compensation). The VM's current configuration (including Disks & * NICs) will be saved in the snapshot.<br> * The snapshot is created in status {@link SnapshotStatus#LOCKED} by default. * * @param snapshotId * The ID for the snapshot. * @param description * The snapshot description. 
* @param snapshotType * The snapshot type. * @param vm * The VM to save in configuration. * @param memoryVolume * the volume in which the snapshot's memory is stored * @param compensationContext * Context for saving compensation details. * @return the added snapshot */ public Snapshot addSnapshot(Guid snapshotId, String description, SnapshotType snapshotType, VM vm, String memoryVolume, final CompensationContext compensationContext) { return addSnapshot(snapshotId, description, SnapshotStatus.LOCKED, snapshotType, vm, true, memoryVolume, compensationContext); } /** * Save snapshot to DB with compensation data. * * @param snapshotId * The snapshot ID. * @param description * The snapshot description. * @param snapshotStatus * The snapshot status. * @param snapshotType * The snapshot type. * @param vm * The VM to link to & save configuration for (if necessary). * @param saveVmConfiguration * Should VM configuration be generated and saved? * @param compensationContext * In case compensation is needed. * @return the saved snapshot */ public Snapshot addSnapshot(Guid snapshotId, String description, SnapshotStatus snapshotStatus, SnapshotType snapshotType, VM vm, boolean saveVmConfiguration, String memoryVolume, final CompensationContext compensationContext) { final Snapshot snapshot = new Snapshot(snapshotId, snapshotStatus, vm.getId(), saveVmConfiguration ? generateVmConfiguration(vm) : null, snapshotType, description, new Date(), vm.getAppList(), memoryVolume); getSnapshotDao().save(snapshot); compensationContext.snapshotNewEntity(snapshot); return snapshot; } /** * Generate a string containing the given VM's configuration. * * @param vm * The VM to generate configuration from. * @return A String containing the VM configuration. 
*/ protected String generateVmConfiguration(VM vm) { if (vm.getInterfaces() == null || vm.getInterfaces().isEmpty()) { vm.setInterfaces(getVmNetworkInterfaceDao().getAllForVm(vm.getId())); } if (StringUtils.isEmpty(vm.getVmtName())) { VmTemplate t = getVmTemplateDao().get(vm.getVmtGuid()); vm.setVmtName(t.getName()); } VmDeviceUtils.setVmDevices(vm.getStaticData()); ArrayList<DiskImage> images = new ArrayList<DiskImage>(ImagesHandler.filterImageDisks(getDiskDao().getAllForVm(vm.getId()), false, true, true)); for (DiskImage image : images) { image.setStorageIds(null); } return new OvfManager().ExportVm(vm, images, ClusterUtils.getCompatibilityVersion(vm)); } /** * Remove all the snapshots that belong to the given VM. * * @param vmId * The ID of the VM. * @return Set of memoryVolumes of the removed snapshots */ public Set<String> removeSnapshots(Guid vmId) { final List<Snapshot> vmSnapshots = getSnapshotDao().getAll(vmId); for (Snapshot snapshot : vmSnapshots) { getSnapshotDao().remove(snapshot.getId()); } return MemoryUtils.getMemoryVolumesFromSnapshots(vmSnapshots); } /** * Remove all illegal disks which were associated with the given snapshot. This is done in order to be able to * switch correctly between snapshots where illegal images might be present. * * @param vmId * The vm ID the disk is associated with. * @param snapshotId * The ID of the snapshot for who to remove illegal images for. 
*/ public void removeAllIllegalDisks(Guid snapshotId, Guid vmId) { for (DiskImage diskImage : getDiskImageDao().getAllSnapshotsForVmSnapshot(snapshotId)) { if (diskImage.getImageStatus() == ImageStatus.ILLEGAL) { ImagesHandler.removeDiskImage(diskImage, vmId); } } } /** * Attempt to read the configuration that is stored in the snapshot, and restore the VM from it.<br> * The NICs and Disks will be restored from the configuration (if available).<br> * <br> * <b>Note:</b>If the configuration is <code>null</code> or can't be decoded, then the VM configuration will remain * as it was but the underlying storage would still have changed.. * * @param snapshot * The snapshot containing the configuration. * @param version * The compatibility version of the VM's cluster * @param user * The user that performs the action */ public void attempToRestoreVmConfigurationFromSnapshot(VM vm, Snapshot snapshot, Guid activeSnapshotId, CompensationContext compensationContext, Version version, DbUser user) { boolean vmUpdatedFromConfiguration = false; if (snapshot.getVmConfiguration() != null) { vmUpdatedFromConfiguration = updateVmFromConfiguration(vm, snapshot.getVmConfiguration()); } if (!vmUpdatedFromConfiguration) { vm.setImages(new ArrayList<DiskImage>(getDiskImageDao().getAllSnapshotsForVmSnapshot(snapshot.getId()))); } vm.setAppList(snapshot.getAppList()); getVmDynamicDao().update(vm.getDynamicData()); synchronizeDisksFromSnapshot(vm.getId(), snapshot.getId(), activeSnapshotId, vm.getImages(), vm.getName()); if (vmUpdatedFromConfiguration) { getVmStaticDao().update(vm.getStaticData()); synchronizeNics(vm, compensationContext, user); for (VmDevice vmDevice : getVmDeviceDao().getVmDeviceByVmId(vm.getId())) { if (deviceCanBeRemoved(vmDevice)) { getVmDeviceDao().remove(vmDevice.getId()); } } VmDeviceUtils.addImportedDevices(vm.getStaticData(), false); } } /** * @param vmDevice * @return true if the device can be removed (disk which allows snapshot can be removed as it is part * of the 
snapshot. Other disks shouldn't be removed as they are not part of the snapshot). */ private boolean deviceCanBeRemoved(VmDevice vmDevice) { if (!vmDevice.getDevice().equals(VmDeviceType.DISK.getName()) || !vmDevice.getIsManaged()) { return true; } return vmDevice.getSnapshotId() == null && getDiskDao().get(vmDevice.getDeviceId()).isAllowSnapshot(); } /** * Update the given VM with the (static) data that is contained in the configuration. The {@link VM#getImages()} * will contain the images that were read from the configuration. * * @param vm * The VM to update. * @param configuration * The configuration to update from. * @return In case of a problem reading the configuration, <code>false</code>. Otherwise, <code>true</code>. */ public boolean updateVmFromConfiguration(VM vm, String configuration) { try { VmStatic oldVmStatic = vm.getStaticData(); VM tempVM = new VM(); ArrayList<DiskImage> images = new ArrayList<DiskImage>(); ArrayList<VmNetworkInterface> interfaces = new ArrayList<VmNetworkInterface>(); new OvfManager().ImportVm(configuration, tempVM, images, interfaces); for (DiskImage diskImage : images) { DiskImage dbImage = getDiskImageDao().getSnapshotById(diskImage.getImageId()); if (dbImage != null) { diskImage.setStorageIds(dbImage.getStorageIds()); } } new VMStaticOvfLogHandler(tempVM.getStaticData()).resetDefaults(oldVmStatic); vm.setStaticData(tempVM.getStaticData()); vm.setImages(images); vm.setInterfaces(interfaces); // These fields are not saved in the OVF, so get them from the current VM. vm.setDedicatedVmForVds(oldVmStatic.getDedicatedVmForVds()); vm.setIsoPath(oldVmStatic.getIsoPath()); vm.setVdsGroupId(oldVmStatic.getVdsGroupId()); // The VM configuration does not hold the vds group Id. 
// It is necessary to fetch the vm static from the Db, in order to get this information VmStatic vmStaticFromDb = getVmStaticDao().get(vm.getId()); if (vmStaticFromDb != null) { VDSGroup vdsGroup = getVdsGroupDao().get(vmStaticFromDb.getVdsGroupId()); if (vdsGroup != null) { vm.setStoragePoolId(vdsGroup.getStoragePoolId()); vm.setVdsGroupCompatibilityVersion(vdsGroup.getcompatibility_version()); vm.setVdsGroupName(vdsGroup.getName()); vm.setVdsGroupCpuName(vdsGroup.getcpu_name()); } } validateQuota(vm); return true; } catch (OvfReaderException e) { log.errorFormat("Failed to update VM from the configuration: {0}).", configuration, e); return false; } } /** * Validate whether the quota supplied in snapshot configuration exists in<br> * current setup, if not reset to null.<br> * * @param vm * imported vm */ private void validateQuota(VM vm) { if (vm.getQuotaId() != null) { Quota quota = getQuotaDao().getById(vm.getQuotaId()); if (quota == null) { vm.setQuotaId(null); } } } /** * Synchronize the VM's {@link VmNetworkInterface}s with the ones from the snapshot.<br> * All existing NICs will be deleted, and the ones from the snapshot re-added.<br> * In case a MAC address is already in use, the user will be issued a warning in the audit log. * * @param nics * The nics from snapshot. * @param version * The compatibility version of the VM's cluster * @param user * The user that performs the action */ protected void synchronizeNics(VM vm, CompensationContext compensationContext, DbUser user) { VmInterfaceManager vmInterfaceManager = new VmInterfaceManager(); VnicProfileHelper vnicProfileHelper = new VnicProfileHelper(vm.getVdsGroupId(), vm.getStoragePoolId(), vm.getVdsGroupCompatibilityVersion(), AuditLogType.IMPORTEXPORT_SNAPSHOT_VM_INVALID_INTERFACES); vmInterfaceManager.removeAll(vm.getId()); for (VmNetworkInterface vmInterface : vm.getInterfaces()) { vmInterface.setVmId(vm.getId()); // These fields might not be saved in the OVF, so fill them with reasonable values. 
if (vmInterface.getId() == null) { vmInterface.setId(Guid.newGuid()); } vnicProfileHelper.updateNicWithVnicProfileForUser(vmInterface, user); vmInterfaceManager.add(vmInterface, compensationContext, false, vm.getOs(), vm.getVdsGroupCompatibilityVersion()); } vnicProfileHelper.auditInvalidInterfaces(vm.getName()); } /** * Synchronize the VM's Disks with the images from the snapshot:<br> * <ul> * <li>Existing disks are updated.</li> * <li>Disks that don't exist anymore get re-added.</li> * <ul> * <li>If the image is still in the DB, the disk is linked to it.</li> * <li>If the image is not in the DB anymore, the disk will be marked as "broken"</li> * </ul> * </ul> * * @param vmId * The VM ID is needed to re-add disks. * @param snapshotId * The snapshot ID is used to find only the VM disks at the time. * @param disksFromSnapshot * The disks that existed in the snapshot. */ protected void synchronizeDisksFromSnapshot(Guid vmId, Guid snapshotId, Guid activeSnapshotId, List<DiskImage> disksFromSnapshot, String vmName) { List<Guid> diskIdsFromSnapshot = new ArrayList<Guid>(); // Sync disks that exist or existed in the snapshot. int count = 1; for (DiskImage diskImage : disksFromSnapshot) { diskIdsFromSnapshot.add(diskImage.getId()); if (getBaseDiskDao().exists(diskImage.getId())) { getBaseDiskDao().update(diskImage); } else { // If can't find the image, insert it as illegal so that it can't be used and make the device unplugged. if (getDiskImageDao().getSnapshotById(diskImage.getImageId()) == null) { diskImage.setImageStatus(ImageStatus.ILLEGAL); diskImage.setVmSnapshotId(activeSnapshotId); ImagesHandler.addImage(diskImage, true, (diskImage.getStorageIds() == null) ? 
null : new image_storage_domain_map(diskImage.getImageId(), diskImage.getStorageIds().get(0), diskImage.getQuotaId())); } ImagesHandler.addDiskToVm(diskImage, vmId); } diskImage.setDiskAlias(ImagesHandler.getSuggestedDiskAlias(diskImage, vmName, count)); count++; } removeDisksNotInSnapshot(vmId, diskIdsFromSnapshot); } /** * Remove all the disks which are allowed to be snapshot but not exist in the snapshot and are not disk snapshots * @param vmId - The vm id which is being snapshot. * @param diskIdsFromSnapshot - An image group id list for images which are part of the VM. */ private void removeDisksNotInSnapshot(Guid vmId, List<Guid> diskIdsFromSnapshot) { for (VmDevice vmDevice : getVmDeviceDao().getVmDeviceByVmIdTypeAndDevice( vmId, VmDeviceGeneralType.DISK, VmDeviceType.DISK.getName())) { if (!diskIdsFromSnapshot.contains(vmDevice.getDeviceId()) && vmDevice.getSnapshotId() == null) { Disk disk = getDiskDao().get(vmDevice.getDeviceId()); if (disk != null && disk.isAllowSnapshot()) { getBaseDiskDao().remove(vmDevice.getDeviceId()); getVmDeviceDao().remove(vmDevice.getId()); } } } } protected VmDeviceDAO getVmDeviceDao() { return DbFacade.getInstance().getVmDeviceDao(); } protected BaseDiskDao getBaseDiskDao() { return DbFacade.getInstance().getBaseDiskDao(); } protected SnapshotDao getSnapshotDao() { return DbFacade.getInstance().getSnapshotDao(); } protected VmDynamicDAO getVmDynamicDao() { return DbFacade.getInstance().getVmDynamicDao(); } protected VmStaticDAO getVmStaticDao() { return DbFacade.getInstance().getVmStaticDao(); } protected DiskImageDAO getDiskImageDao() { return DbFacade.getInstance().getDiskImageDao(); } protected DiskDao getDiskDao() { return DbFacade.getInstance().getDiskDao(); } protected VdsGroupDAO getVdsGroupDao() { return DbFacade.getInstance().getVdsGroupDao(); } protected VmTemplateDAO getVmTemplateDao() { return DbFacade.getInstance().getVmTemplateDao(); } protected VmNetworkInterfaceDao getVmNetworkInterfaceDao() { return 
DbFacade.getInstance().getVmNetworkInterfaceDao(); } protected QuotaDAO getQuotaDao() { return DbFacade.getInstance().getQuotaDao(); } }
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/snapshots/SnapshotsManager.java
package org.ovirt.engine.core.bll.snapshots; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.ovirt.engine.core.bll.ImagesHandler; import org.ovirt.engine.core.bll.context.CompensationContext; import org.ovirt.engine.core.bll.memory.MemoryUtils; import org.ovirt.engine.core.bll.network.VmInterfaceManager; import org.ovirt.engine.core.bll.network.vm.VnicProfileHelper; import org.ovirt.engine.core.bll.utils.ClusterUtils; import org.ovirt.engine.core.bll.utils.VmDeviceUtils; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.businessentities.DbUser; import org.ovirt.engine.core.common.businessentities.Disk; import org.ovirt.engine.core.common.businessentities.DiskImage; import org.ovirt.engine.core.common.businessentities.ImageStatus; import org.ovirt.engine.core.common.businessentities.Quota; import org.ovirt.engine.core.common.businessentities.Snapshot; import org.ovirt.engine.core.common.businessentities.Snapshot.SnapshotStatus; import org.ovirt.engine.core.common.businessentities.Snapshot.SnapshotType; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.VM; import org.ovirt.engine.core.common.businessentities.VmDevice; import org.ovirt.engine.core.common.businessentities.VmDeviceGeneralType; import org.ovirt.engine.core.common.businessentities.VmStatic; import org.ovirt.engine.core.common.businessentities.VmTemplate; import org.ovirt.engine.core.common.businessentities.image_storage_domain_map; import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface; import org.ovirt.engine.core.common.utils.VmDeviceType; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.compat.Version; import org.ovirt.engine.core.dal.dbbroker.DbFacade; import org.ovirt.engine.core.dao.BaseDiskDao; import org.ovirt.engine.core.dao.DiskDao; import 
org.ovirt.engine.core.dao.DiskImageDAO; import org.ovirt.engine.core.dao.QuotaDAO; import org.ovirt.engine.core.dao.SnapshotDao; import org.ovirt.engine.core.dao.VdsGroupDAO; import org.ovirt.engine.core.dao.VmDAO; import org.ovirt.engine.core.dao.VmDeviceDAO; import org.ovirt.engine.core.dao.VmDynamicDAO; import org.ovirt.engine.core.dao.VmStaticDAO; import org.ovirt.engine.core.dao.VmTemplateDAO; import org.ovirt.engine.core.dao.network.VmNetworkInterfaceDao; import org.ovirt.engine.core.utils.log.Log; import org.ovirt.engine.core.utils.log.LogFactory; import org.ovirt.engine.core.utils.ovf.OvfManager; import org.ovirt.engine.core.utils.ovf.OvfReaderException; import org.ovirt.engine.core.utils.ovf.VMStaticOvfLogHandler; /** * The {@link Snapshot} manager is used to easily add/update/remove snapshots. */ public class SnapshotsManager { private final static Log log = LogFactory.getLog(SnapshotsManager.class); /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created in status {@link SnapshotStatus#OK} by default. * * @see #addActiveSnapshot(Guid, VM, SnapshotStatus, CompensationContext) * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. * @param compensationContext * Context for saving compensation details. * @return the newly created snapshot */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, final CompensationContext compensationContext) { return addActiveSnapshot(snapshotId, vm, SnapshotStatus.OK, "", compensationContext); } /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created in status {@link SnapshotStatus#OK} by default. * * @see #addActiveSnapshot(Guid, VM, SnapshotStatus, CompensationContext) * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. 
* @param snapshotStatus * The initial status of the created snapshot * @param compensationContext * Context for saving compensation details. * @return the newly created snapshot */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, SnapshotStatus snapshotStatus, final CompensationContext compensationContext) { return addActiveSnapshot(snapshotId, vm, snapshotStatus, "", compensationContext); } /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created in status {@link SnapshotStatus#OK} by default. * * @see #addActiveSnapshot(Guid, VM, SnapshotStatus, CompensationContext) * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. * @param memoryVolume * The memory state for the created snapshot * @param compensationContext * Context for saving compensation details. * @return the newly created snapshot */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, String memoryVolume, final CompensationContext compensationContext) { return addActiveSnapshot(snapshotId, vm, SnapshotStatus.OK, memoryVolume, compensationContext); } /** * Save an active snapshot for the VM, without saving the configuration.<br> * The snapshot is created with the given status {@link SnapshotStatus}. * * @param snapshotId * The ID for the snapshot. * @param vm * The VM to save the snapshot for. * @param snapshotStatus * The initial status of the snapshot * @param compensationContext * Context for saving compensation details. */ public Snapshot addActiveSnapshot(Guid snapshotId, VM vm, SnapshotStatus snapshotStatus, String memoryVolume, final CompensationContext compensationContext) { return addSnapshot(snapshotId, "Active VM", snapshotStatus, SnapshotType.ACTIVE, vm, false, memoryVolume, compensationContext); } /** * Add a new snapshot, saving it to the DB (with compensation). 
The VM's current configuration (including Disks & * NICs) will be saved in the snapshot.<br> * The snapshot is created in status {@link SnapshotStatus#LOCKED} by default. * * @param snapshotId * The ID for the snapshot. * @param description * The snapshot description. * @param snapshotType * The snapshot type. * @param vm * The VM to save in configuration. * @param memoryVolume * the volume in which the snapshot's memory is stored * @param compensationContext * Context for saving compensation details. * @return the added snapshot */ public Snapshot addSnapshot(Guid snapshotId, String description, SnapshotType snapshotType, VM vm, String memoryVolume, final CompensationContext compensationContext) { return addSnapshot(snapshotId, description, SnapshotStatus.LOCKED, snapshotType, vm, true, memoryVolume, compensationContext); } /** * Save snapshot to DB with compensation data. * * @param snapshotId * The snapshot ID. * @param description * The snapshot description. * @param snapshotStatus * The snapshot status. * @param snapshotType * The snapshot type. * @param vm * The VM to link to & save configuration for (if necessary). * @param saveVmConfiguration * Should VM configuration be generated and saved? * @param compensationContext * In case compensation is needed. * @return the saved snapshot */ public Snapshot addSnapshot(Guid snapshotId, String description, SnapshotStatus snapshotStatus, SnapshotType snapshotType, VM vm, boolean saveVmConfiguration, String memoryVolume, final CompensationContext compensationContext) { final Snapshot snapshot = new Snapshot(snapshotId, snapshotStatus, vm.getId(), saveVmConfiguration ? generateVmConfiguration(vm) : null, snapshotType, description, new Date(), vm.getAppList(), memoryVolume); getSnapshotDao().save(snapshot); compensationContext.snapshotNewEntity(snapshot); return snapshot; } /** * Generate a string containing the given VM's configuration. * * @param vm * The VM to generate configuration from. 
* @return A String containing the VM configuration. */ protected String generateVmConfiguration(VM vm) { if (vm.getInterfaces() == null || vm.getInterfaces().isEmpty()) { vm.setInterfaces(getVmNetworkInterfaceDao().getAllForVm(vm.getId())); } if (StringUtils.isEmpty(vm.getVmtName())) { VmTemplate t = getVmTemplateDao().get(vm.getVmtGuid()); vm.setVmtName(t.getName()); } VmDeviceUtils.setVmDevices(vm.getStaticData()); ArrayList<DiskImage> images = new ArrayList<DiskImage>(ImagesHandler.filterImageDisks(getDiskDao().getAllForVm(vm.getId()), false, true, true)); for (DiskImage image : images) { image.setStorageIds(null); } return new OvfManager().ExportVm(vm, images, ClusterUtils.getCompatibilityVersion(vm)); } /** * Remove all the snapshots that belong to the given VM. * * @param vmId * The ID of the VM. * @return Set of memoryVolumes of the removed snapshots */ public Set<String> removeSnapshots(Guid vmId) { final List<Snapshot> vmSnapshots = getSnapshotDao().getAll(vmId); for (Snapshot snapshot : vmSnapshots) { getSnapshotDao().remove(snapshot.getId()); } return MemoryUtils.getMemoryVolumesFromSnapshots(vmSnapshots); } /** * Remove all illegal disks which were associated with the given snapshot. This is done in order to be able to * switch correctly between snapshots where illegal images might be present. * * @param vmId * The vm ID the disk is associated with. * @param snapshotId * The ID of the snapshot for who to remove illegal images for. 
*/ public void removeAllIllegalDisks(Guid snapshotId, Guid vmId) { for (DiskImage diskImage : getDiskImageDao().getAllSnapshotsForVmSnapshot(snapshotId)) { if (diskImage.getImageStatus() == ImageStatus.ILLEGAL) { ImagesHandler.removeDiskImage(diskImage, vmId); } } } /** * Attempt to read the configuration that is stored in the snapshot, and restore the VM from it.<br> * The NICs and Disks will be restored from the configuration (if available).<br> * <br> * <b>Note:</b>If the configuration is <code>null</code> or can't be decoded, then the VM configuration will remain * as it was but the underlying storage would still have changed.. * * @param snapshot * The snapshot containing the configuration. * @param version * The compatibility version of the VM's cluster * @param user * The user that performs the action */ public void attempToRestoreVmConfigurationFromSnapshot(VM vm, Snapshot snapshot, Guid activeSnapshotId, CompensationContext compensationContext, Version version, DbUser user) { boolean vmUpdatedFromConfiguration = false; if (snapshot.getVmConfiguration() != null) { vmUpdatedFromConfiguration = updateVmFromConfiguration(vm, snapshot.getVmConfiguration()); } if (!vmUpdatedFromConfiguration) { vm.setImages(new ArrayList<DiskImage>(getDiskImageDao().getAllSnapshotsForVmSnapshot(snapshot.getId()))); } vm.setAppList(snapshot.getAppList()); getVmDynamicDao().update(vm.getDynamicData()); synchronizeDisksFromSnapshot(vm.getId(), snapshot.getId(), activeSnapshotId, vm.getImages(), vm.getName()); if (vmUpdatedFromConfiguration) { getVmStaticDao().update(vm.getStaticData()); synchronizeNics(vm, compensationContext, user); for (VmDevice vmDevice : getVmDeviceDao().getVmDeviceByVmId(vm.getId())) { if (deviceCanBeRemoved(vmDevice)) { getVmDeviceDao().remove(vmDevice.getId()); } } VmDeviceUtils.addImportedDevices(vm.getStaticData(), false); } } /** * @param vmDevice * @return true if the device can be removed (disk which allows snapshot can be removed as it is part * of the 
snapshot. Other disks shouldn't be removed as they are not part of the snapshot). */ private boolean deviceCanBeRemoved(VmDevice vmDevice) { if (!vmDevice.getDevice().equals(VmDeviceType.DISK.getName()) || !vmDevice.getIsManaged()) { return true; } return vmDevice.getSnapshotId() == null && getDiskDao().get(vmDevice.getDeviceId()).isAllowSnapshot(); } /** * Update the given VM with the (static) data that is contained in the configuration. The {@link VM#getImages()} * will contain the images that were read from the configuration. * * @param vm * The VM to update. * @param configuration * The configuration to update from. * @return In case of a problem reading the configuration, <code>false</code>. Otherwise, <code>true</code>. */ public boolean updateVmFromConfiguration(VM vm, String configuration) { try { VmStatic oldVmStatic = vm.getStaticData(); VM tempVM = new VM(); ArrayList<DiskImage> images = new ArrayList<DiskImage>(); ArrayList<VmNetworkInterface> interfaces = new ArrayList<VmNetworkInterface>(); new OvfManager().ImportVm(configuration, tempVM, images, interfaces); for (DiskImage diskImage : images) { DiskImage dbImage = getDiskImageDao().getSnapshotById(diskImage.getImageId()); if (dbImage != null) { diskImage.setStorageIds(dbImage.getStorageIds()); } } new VMStaticOvfLogHandler(tempVM.getStaticData()).resetDefaults(oldVmStatic); vm.setStaticData(tempVM.getStaticData()); vm.setImages(images); vm.setInterfaces(interfaces); // These fields are not saved in the OVF, so get them from the current VM. vm.setDedicatedVmForVds(oldVmStatic.getDedicatedVmForVds()); vm.setIsoPath(oldVmStatic.getIsoPath()); vm.setVdsGroupId(oldVmStatic.getVdsGroupId()); // The VM configuration does not hold the vds group Id. 
// It is necessary to fetch the vm static from the Db, in order to get this information VmStatic vmStaticFromDb = getVmStaticDao().get(vm.getId()); if (vmStaticFromDb != null) { VDSGroup vdsGroup = getVdsGroupDao().get(vmStaticFromDb.getVdsGroupId()); if (vdsGroup != null) { vm.setStoragePoolId(vdsGroup.getStoragePoolId()); vm.setVdsGroupCompatibilityVersion(vdsGroup.getcompatibility_version()); vm.setVdsGroupName(vdsGroup.getName()); vm.setVdsGroupCpuName(vdsGroup.getcpu_name()); } } validateQuota(vm); return true; } catch (OvfReaderException e) { log.errorFormat("Failed to update VM from the configuration: {0}).", configuration, e); return false; } } /** * Validate whether the quota supplied in snapshot configuration exists in<br> * current setup, if not reset to null.<br> * * @param vm * imported vm */ private void validateQuota(VM vm) { if (vm.getQuotaId() != null) { Quota quota = getQuotaDao().getById(vm.getQuotaId()); if (quota == null) { vm.setQuotaId(null); } } } /** * Synchronize the VM's {@link VmNetworkInterface}s with the ones from the snapshot.<br> * All existing NICs will be deleted, and the ones from the snapshot re-added.<br> * In case a MAC address is already in use, the user will be issued a warning in the audit log. * * @param nics * The nics from snapshot. * @param version * The compatibility version of the VM's cluster * @param user * The user that performs the action */ protected void synchronizeNics(VM vm, CompensationContext compensationContext, DbUser user) { VmInterfaceManager vmInterfaceManager = new VmInterfaceManager(); VnicProfileHelper vnicProfileHelper = new VnicProfileHelper(vm.getVdsGroupId(), vm.getStoragePoolId(), vm.getVdsGroupCompatibilityVersion(), AuditLogType.IMPORTEXPORT_SNAPSHOT_VM_INVALID_INTERFACES); vmInterfaceManager.removeAll(vm.getId()); for (VmNetworkInterface vmInterface : vm.getInterfaces()) { vmInterface.setVmId(vm.getId()); // These fields might not be saved in the OVF, so fill them with reasonable values. 
if (vmInterface.getId() == null) { vmInterface.setId(Guid.newGuid()); } vnicProfileHelper.updateNicWithVnicProfileForUser(vmInterface, user); vmInterfaceManager.add(vmInterface, compensationContext, false, vm.getOs(), vm.getVdsGroupCompatibilityVersion()); } vnicProfileHelper.auditInvalidInterfaces(vm.getName()); } /** * Synchronize the VM's Disks with the images from the snapshot:<br> * <ul> * <li>Existing disks are updated.</li> * <li>Disks that don't exist anymore get re-added.</li> * <ul> * <li>If the image is still in the DB, the disk is linked to it.</li> * <li>If the image is not in the DB anymore, the disk will be marked as "broken"</li> * </ul> * </ul> * * @param vmId * The VM ID is needed to re-add disks. * @param snapshotId * The snapshot ID is used to find only the VM disks at the time. * @param disksFromSnapshot * The disks that existed in the snapshot. */ protected void synchronizeDisksFromSnapshot(Guid vmId, Guid snapshotId, Guid activeSnapshotId, List<DiskImage> disksFromSnapshot, String vmName) { List<Guid> diskIdsFromSnapshot = new ArrayList<Guid>(); // Sync disks that exist or existed in the snapshot. int count = 1; for (DiskImage diskImage : disksFromSnapshot) { diskIdsFromSnapshot.add(diskImage.getId()); if (getBaseDiskDao().exists(diskImage.getId())) { getBaseDiskDao().update(diskImage); } else { // If can't find the image, insert it as illegal so that it can't be used and make the device unplugged. if (getDiskImageDao().getSnapshotById(diskImage.getImageId()) == null) { diskImage.setImageStatus(ImageStatus.ILLEGAL); diskImage.setVmSnapshotId(activeSnapshotId); ImagesHandler.addImage(diskImage, true, (diskImage.getStorageIds() == null) ? 
null : new image_storage_domain_map(diskImage.getImageId(), diskImage.getStorageIds().get(0), diskImage.getQuotaId())); } ImagesHandler.addDiskToVm(diskImage, vmId); } diskImage.setDiskAlias(ImagesHandler.getSuggestedDiskAlias(diskImage, vmName, count)); count++; } removeDisksNotInSnapshot(vmId, diskIdsFromSnapshot); } /** * Remove all the disks which are allowed to be snapshot but not exist in the snapshot and are not disk snapshots * @param vmId - The vm id which is being snapshot. * @param diskIdsFromSnapshot - An image group id list for images which are part of the VM. */ private void removeDisksNotInSnapshot(Guid vmId, List<Guid> diskIdsFromSnapshot) { for (VmDevice vmDevice : getVmDeviceDao().getVmDeviceByVmIdTypeAndDevice( vmId, VmDeviceGeneralType.DISK, VmDeviceType.DISK.getName())) { if (!diskIdsFromSnapshot.contains(vmDevice.getDeviceId()) && vmDevice.getSnapshotId() == null) { Disk disk = getDiskDao().get(vmDevice.getDeviceId()); if (disk != null && disk.isAllowSnapshot()) { getBaseDiskDao().remove(vmDevice.getDeviceId()); getVmDeviceDao().remove(vmDevice.getId()); } } } } protected VmDeviceDAO getVmDeviceDao() { return DbFacade.getInstance().getVmDeviceDao(); } protected BaseDiskDao getBaseDiskDao() { return DbFacade.getInstance().getBaseDiskDao(); } protected VmDAO getVmDao() { return DbFacade.getInstance().getVmDao(); } protected SnapshotDao getSnapshotDao() { return DbFacade.getInstance().getSnapshotDao(); } protected VmDynamicDAO getVmDynamicDao() { return DbFacade.getInstance().getVmDynamicDao(); } protected VmStaticDAO getVmStaticDao() { return DbFacade.getInstance().getVmStaticDao(); } protected DiskImageDAO getDiskImageDao() { return DbFacade.getInstance().getDiskImageDao(); } protected DiskDao getDiskDao() { return DbFacade.getInstance().getDiskDao(); } protected VdsGroupDAO getVdsGroupDao() { return DbFacade.getInstance().getVdsGroupDao(); } protected VmTemplateDAO getVmTemplateDao() { return DbFacade.getInstance().getVmTemplateDao(); } 
protected VmNetworkInterfaceDao getVmNetworkInterfaceDao() { return DbFacade.getInstance().getVmNetworkInterfaceDao(); } protected QuotaDAO getQuotaDao() { return DbFacade.getInstance().getQuotaDao(); } }
core: Remove unused SnapshotManager.getVmDao() Change-Id: I5f65b591abef6b3581ed483f9f240e5f37d1e86c Signed-off-by: Allon Mureinik <[email protected]>
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/snapshots/SnapshotsManager.java
core: Remove unused SnapshotManager.getVmDao()
Java
apache-2.0
558465a21ba29e079279bbbb872ea96d6ea7b3a4
0
camac/XPagesPhoneNumberControl
package com.gregorbyte.xsp.converter; import javax.faces.application.FacesMessage; import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.faces.convert.Converter; import javax.faces.convert.ConverterException; import com.google.i18n.phonenumbers.NumberParseException; import com.google.i18n.phonenumbers.PhoneNumberUtil; import com.google.i18n.phonenumbers.PhoneNumberUtil.PhoneNumberFormat; import com.google.i18n.phonenumbers.Phonenumber.PhoneNumber; import com.ibm.commons.util.StringUtil; public class PhoneNumberConverter implements Converter { private String defaultCountryCode = null; public Object getAsObject(FacesContext context, UIComponent component, String value) { if (StringUtil.isEmpty(value)) return value; PhoneNumberUtil util = PhoneNumberUtil.getInstance(); String defCountryCode = getDefaultCountryCode(); try { PhoneNumber number = util.parse(value, defCountryCode); if (!util.isValidNumber(number)) { FacesMessage fm = new FacesMessage(); fm.setSummary("Phone Number is not valid for country code " + defCountryCode); fm.setDetail("The supplied Phone Number is not valid to the country code " + defCountryCode); fm.setSeverity(FacesMessage.SEVERITY_ERROR); throw new ConverterException(fm); } else { return util.format(number, PhoneNumberFormat.INTERNATIONAL); } } catch (NumberParseException e) { FacesMessage fm = new FacesMessage(); fm.setSummary("The Phone Number entered is not in a valid format for " + defCountryCode); fm.setDetail(e.getMessage()); fm.setSeverity(FacesMessage.SEVERITY_ERROR); throw new ConverterException(fm); } } public String getAsString(FacesContext context, UIComponent component, Object value) { if (value == null) return null; return value.toString(); } public String getDefaultCountryCode() { if (this.defaultCountryCode != null) { return this.defaultCountryCode; } return "AU"; } public void setDefaultCountryCode(String defaultCountryCode) { this.defaultCountryCode = defaultCountryCode; } }
com.gregorbyte.xsp.phonenumber.nsf/Code/Java/com/gregorbyte/xsp/converter/PhoneNumberConverter.java
package com.gregorbyte.xsp.converter; import javax.faces.application.FacesMessage; import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.faces.convert.Converter; import javax.faces.convert.ConverterException; import com.google.i18n.phonenumbers.NumberParseException; import com.google.i18n.phonenumbers.PhoneNumberUtil; import com.google.i18n.phonenumbers.PhoneNumberUtil.PhoneNumberFormat; import com.google.i18n.phonenumbers.Phonenumber.PhoneNumber; import com.ibm.commons.util.StringUtil; public class PhoneNumberConverter implements Converter { public Object getAsObject(FacesContext context, UIComponent component, String value) { if (StringUtil.isEmpty(value)) return value; PhoneNumberUtil util = PhoneNumberUtil.getInstance(); String defCountryCode = "AU"; try { PhoneNumber number = util.parse(value, defCountryCode); if (!util.isValidNumber(number)) { FacesMessage fm = new FacesMessage(); fm.setSummary("Phone Number is not valid for country code " + defCountryCode); fm.setDetail("The supplied Phone Number is not valid to the country code " + defCountryCode); fm.setSeverity(FacesMessage.SEVERITY_ERROR); throw new ConverterException(fm); } else { return util.format(number, PhoneNumberFormat.INTERNATIONAL); } } catch (NumberParseException e) { FacesMessage fm = new FacesMessage(); fm.setSummary("The Phone Number entered is not in a valid format for " + defCountryCode); fm.setDetail(e.getMessage()); fm.setSeverity(FacesMessage.SEVERITY_ERROR); throw new ConverterException(fm); } } public String getAsString(FacesContext context, UIComponent component, Object value) { if (value == null) return null; return value.toString(); } }
Added Default Country Code parameter to PhoneNumberConverter java class
com.gregorbyte.xsp.phonenumber.nsf/Code/Java/com/gregorbyte/xsp/converter/PhoneNumberConverter.java
Added Default Country Code parameter to PhoneNumberConverter java class
Java
apache-2.0
c500c357250a00318b7359a139719c7773391391
0
klehmann/domino-jna
package com.mindoo.domino.jna.utils; import com.mindoo.domino.jna.NotesDatabase; import com.mindoo.domino.jna.NotesDateRange; import com.mindoo.domino.jna.NotesTimeDate; import com.mindoo.domino.jna.errors.NotesError; import com.mindoo.domino.jna.errors.NotesErrorUtils; import com.mindoo.domino.jna.gc.IAllocatedMemory; import com.mindoo.domino.jna.gc.NotesGC; import com.mindoo.domino.jna.internal.DisposableMemory; import com.mindoo.domino.jna.internal.NotesConstants; import com.mindoo.domino.jna.internal.NotesNativeAPI; import com.mindoo.domino.jna.internal.NotesNativeAPI32; import com.mindoo.domino.jna.internal.NotesNativeAPI64; import com.mindoo.domino.jna.internal.structs.NotesTimeDateStruct; import com.sun.jna.Memory; import com.sun.jna.Pointer; import com.sun.jna.ptr.IntByReference; import com.sun.jna.ptr.LongByReference; import com.sun.jna.ptr.PointerByReference; import com.sun.jna.ptr.ShortByReference; /** * Utility class to read and write Out-Of-Office (OOO) information * for a Dominp user. * * @author Karsten Lehmann */ public class NotesOOOUtils { /** * This function should be called prior to performing any OOO operation.<br> * It initializes values for each specific user.<br> * <br> * When you are finished with the logic of a specific operation you are required * to call OOOEndOperation routine.<br> * For example,to check the state of OOO functionality for a specific user you would * call OOOStartOperation, OOOGetState, OOOEndOperation. 
All strings are LMBCS * strings.<br> * The user is required to have a minimum of Editor level access in the ACL of * their mail file.<br> * <br> * Effeciency Considerations:<br> * For most efficient operation specify all optional parameters (home mail server * and handle to the user’s mail file).<br> * If home mail server is not specified or if the mail file handle is not provided, * this function will look up this information on the server specified in * <code>homeMailServer</code> parameter.<br> * <br> * If that lookup fails it will attempt a look up locally on the server where the * application is running.<br> * <br> * If the second lookup fails and handle to the mail file was provided, then a lookup * on the server where the database is located will be performed. If you would like * to suppress the extra look ups and limit the look up only to the server which * was specified in pMailServer parameter use the following ini variable on the * server where this api/application is running.<br> * <br> * SUPRESS_OOO_DIRECTORY_FAILOVER_LOOKUP = 1<br> * <br> * When multiple lookups are performed it is typically a sign that there is a * configuration problem in the domain and an event indicating this will be logged * to the server console (and DDM).<br> * <br> * This event will be generated 5 or more minutes apart to avoid flooding the server. * * @param mailOwnerName Canonical or abbreviated name of the owner of the mail where we are turning on OOO,Mandatory parameter. * @param homeMailServer Canonical or abbreviated name of the server where the lookup for user information should be made (optional). If the server name is not a home mail server, an attempt will be made to figure out the home mail server by looking first locally and, if configured, in the extended directory. The lookups can be suppressed by providing the server name in <code>homeMailServer</code> parameter and setting the <code>isHomeMailServer</code> parameter to TRUE. 
Suppressing lookups is a more efficient option. * @param isHomeMailServer TRUE if the <code>homeMailServer</code> is user’s home mail(optional). Set it only if you are sure that user’s home mail server was specified. If FALSE the look up for user’s home mail will be performed. * @param dbMail If the application already has the mail file opened they can pass it in for better better efficiency. * @return OOO context to read or write data */ public static NotesOOOContext startOperation(String mailOwnerName, String homeMailServer, boolean isHomeMailServer, NotesDatabase dbMail) { Memory mailOwnerNameMem = NotesStringUtils.toLMBCS(NotesNamingUtils.toCanonicalName(mailOwnerName), true); Memory homeMailServerMem = NotesStringUtils.toLMBCS(NotesNamingUtils.toCanonicalName(homeMailServer), true); PointerByReference pOOOOContext = new PointerByReference(); short result; result = NotesNativeAPI.get().OOOInit(); NotesErrorUtils.checkResult(result); if (PlatformUtils.is64Bit()) { LongByReference hOOOContext = new LongByReference(); result = NotesNativeAPI64.get().OOOStartOperation(mailOwnerNameMem, homeMailServerMem, isHomeMailServer ? 1 : 0, dbMail==null ? 0 : dbMail.getHandle64(), hOOOContext, pOOOOContext); NotesErrorUtils.checkResult(result); NotesOOOContext ctx = new NotesOOOContext(hOOOContext.getValue(), pOOOOContext.getValue()); NotesGC.__memoryAllocated(ctx); return ctx; } else { IntByReference hOOOContext = new IntByReference(); result = NotesNativeAPI32.get().OOOStartOperation(mailOwnerNameMem, homeMailServerMem, isHomeMailServer ? 1 : 0, dbMail==null ? 
0 : dbMail.getHandle32(), hOOOContext, pOOOOContext); NotesErrorUtils.checkResult(result); NotesOOOContext ctx = new NotesOOOContext(hOOOContext.getValue(), pOOOOContext.getValue()); NotesGC.__memoryAllocated(ctx); return ctx; } } public static class NotesOOOContext implements IAllocatedMemory { public static enum OOOType {AGENT, SERVICE} private int m_hOOOContext32; private long m_hOOOContext64; private Pointer m_pOOOContext; private NotesOOOContext(int hOOOContext, Pointer pOOOContext) { if (PlatformUtils.is64Bit()) throw new NotesError(0, "Constructor is 32-bit only"); m_hOOOContext32 = hOOOContext; m_pOOOContext = pOOOContext; } private NotesOOOContext(long hOOOContext, Pointer pOOOContext) { if (!PlatformUtils.is64Bit()) throw new NotesError(0, "Constructor is 64-bit only"); m_hOOOContext64 = hOOOContext; m_pOOOContext = pOOOContext; } @Override public void free() { if (isFreed()) return; if (PlatformUtils.is64Bit()) { short result = NotesNativeAPI64.get().OOOEndOperation(m_hOOOContext64, m_pOOOContext); NotesErrorUtils.checkResult(result); m_hOOOContext64 = 0; } else { short result = NotesNativeAPI32.get().OOOEndOperation(m_hOOOContext32, m_pOOOContext); NotesErrorUtils.checkResult(result); m_hOOOContext32 = 0; } } @Override public boolean isFreed() { return PlatformUtils.is64Bit() ? m_hOOOContext64==0 : m_hOOOContext32==0; } @Override public int getHandle32() { return m_hOOOContext32; } @Override public long getHandle64() { return m_hOOOContext64; } private void checkHandle() { if (PlatformUtils.is64Bit()) { if (m_hOOOContext64==0) throw new NotesError(0, "OOO context already recycled"); NotesGC.__b64_checkValidMemHandle(NotesOOOContext.class, m_hOOOContext64); } else { if (m_hOOOContext32==0) throw new NotesError(0, "OOO context already recycled"); NotesGC.__b32_checkValidMemHandle(NotesOOOContext.class, m_hOOOContext32); } } /** * This function returns time parameters that control OOO. 
* * @return away period */ public NotesDateRange getAwayPeriod() { checkHandle(); NotesTimeDateStruct tdStartAwayStruct = NotesTimeDateStruct.newInstance(); NotesTimeDateStruct tdEndAwayStruct = NotesTimeDateStruct.newInstance(); short result = NotesNativeAPI.get().OOOGetAwayPeriod(m_pOOOContext, tdStartAwayStruct, tdEndAwayStruct); NotesErrorUtils.checkResult(result); NotesTimeDate tdStartAway = new NotesTimeDate(tdStartAwayStruct); NotesTimeDate tdEndAway = new NotesTimeDate(tdEndAwayStruct); return new NotesDateRange(tdStartAway, tdEndAway); } /** * This function returns a flag which defines how to treat internet emails.<br> * This functional call is optional.<br> * If this flag is set to TRUE OOO notifications will not be generated for<br> * email originating from the internet. The default for this flag is TRUE. * * @return true if excluded */ public boolean isExcludeInternet() { checkHandle(); IntByReference bExcludeInternet = new IntByReference(); short result = NotesNativeAPI.get().OOOGetExcludeInternet(m_pOOOContext, bExcludeInternet); NotesErrorUtils.checkResult(result); return bExcludeInternet.getValue()==1; } /** * This function sets a flag which defines how to treat internet emails.<br> * <br> * This functional call is optional.<br> * If this flag is set to TRUE OOO notifications will not be generated for * email originating from the internet.<br> * The default for this flag is TRUE. * * @param exclude true to exclude */ public void setExcludeInternet(boolean exclude) { checkHandle(); short result = NotesNativeAPI.get().OOOSetExcludeInternet(m_pOOOContext, exclude ? 1 : 0); NotesErrorUtils.checkResult(result); } /** * Convenience method to check whether the OOO functionality is enabled. Calls * {@link #getState(Ref, Ref)} internally. 
* * @return true if enabled */ public boolean isEnabled() { Ref<Boolean> retIsEnabled = new Ref<Boolean>(); getState(null, retIsEnabled); return Boolean.TRUE.equals(retIsEnabled.get()); } /** * Convenience method to read which kind of OOO system is used (agent or service). * Calls {@link #getState(Ref, Ref)} internally. * * @return type */ public OOOType getType() { Ref<OOOType> retType = new Ref<OOOType>(); getState(retType, null); return retType.get(); } /** * This function returns the version (agent, service) and the state (disabled, enabled) * of the out of office functionality.<br> * The version information can be used to show or hide UI elements that might not be * supported for a given version.<br> * For example, the agent does not support durations of less than 1 day and some * clients might choose not to show the hours in the user interface.<br> * When you need to make {@link #getState(Ref, Ref)} as efficient as possible, call * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)} * with the home mail server and the opened mail database.<br> * This function is read only and does not return an error if user ACL rights * are below Editor (which are required to turn on/off the Out of office functionality).<br> * If {@link #getState(Ref, Ref)} is called immediately following OOOEnable it will * not reflect the state set by the OOOEnable.<br> * To see the current state call {@link #free()} and start a new operation using * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link NotesOOOContext#getState(Ref, Ref)} and {@link #free()}. 
* @param retType returns the type of the OOO system (agent or service) * @param retIsEnabled returns whether the service is enabled for the user */ public void getState(Ref<OOOType> retType, Ref<Boolean> retIsEnabled) { checkHandle(); ShortByReference retVersion = new ShortByReference(); ShortByReference retState = new ShortByReference(); short result = NotesNativeAPI.get().OOOGetState(m_pOOOContext, retVersion, retState); NotesErrorUtils.checkResult(result); if (retType!=null) { if (retVersion.getValue() == 1) { retType.set(OOOType.AGENT); } else if (retVersion.getValue() == 2) { retType.set(OOOType.SERVICE); } } if (retIsEnabled!=null) { if (retState.getValue()==1) { retIsEnabled.set(Boolean.TRUE); } else { retIsEnabled.set(Boolean.FALSE); } } } /** * OOO supports two sets of messages.<br> * <br> * They are called General message/subject and Special message/subject.<br> * This function gets the general subject.<br> * This is string that will appear as the subject line of the OOO notification. * * @return subject */ public String getGeneralSubject() { checkHandle(); DisposableMemory retSubject = new DisposableMemory(NotesConstants.OOOPROF_MAX_BODY_SIZE); try { short result = NotesNativeAPI.get().OOOGetGeneralSubject(m_pOOOContext, retSubject); NotesErrorUtils.checkResult(result); String subject = NotesStringUtils.fromLMBCS(retSubject, -1); return subject; } finally { retSubject.dispose(); } } /** * OOO supports two sets of messages. They are called General message/subject and * Special message/subject.<br> * This function returns the text of the general message. 
* * @return message */ public String getGeneralMessage() { checkHandle(); //first get the length ShortByReference retGeneralMessageLen = new ShortByReference(); short result = NotesNativeAPI.get().OOOGetGeneralMessage(m_pOOOContext, null, retGeneralMessageLen); NotesErrorUtils.checkResult(result); int iGeneralMessageLen = (int) (retGeneralMessageLen.getValue() & 0xffff); if (iGeneralMessageLen==0) return ""; DisposableMemory retMessage = new DisposableMemory(iGeneralMessageLen + 1); try { result = NotesNativeAPI.get().OOOGetGeneralMessage(m_pOOOContext, retMessage, retGeneralMessageLen); NotesErrorUtils.checkResult(result); String msg = NotesStringUtils.fromLMBCS(retMessage, retGeneralMessageLen.getValue()); return msg; } finally { retMessage.dispose(); } } /** * This function validates and sets the time parameters that control OOO.<br> * <br> * This information is required for enabling the OOO.<br> * If you want turn on OOO functionality for a given period of time the * sequence of calls needed is:<br> * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)}, {@link #setEnabled(boolean)} * and {@link #free()}.<br> * <br> * When you need to enable OOO (i.e. call it with <code>enabled</code> flag set to TRUE) * you should call {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} prior to calling * {@link #setEnabled(boolean)}.<br> * <br> * If you need to change the length of the away period after OOO has already been * enabled, the sequence of calls needed to perform this action is * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)}, {@link #free()}.<br> * <br> * If the Domino server is configured to run an OOO agent, it can only be turned on * for full days, the time portion of the date parameter will not be used. * * @param tdStartAway This is date and time when Out of office will begin. 
* @param tdEndAway This is date and time when Out of office will end. */ public void setAwayPeriod(NotesTimeDate tdStartAway, NotesTimeDate tdEndAway) { checkHandle(); NotesTimeDateStruct.ByValue tdStartWayByVal = NotesTimeDateStruct.ByValue.newInstance(tdStartAway.getInnards()); NotesTimeDateStruct.ByValue tdEndWayByVal = NotesTimeDateStruct.ByValue.newInstance(tdEndAway.getInnards()); short result = NotesNativeAPI.get().OOOSetAwayPeriod(m_pOOOContext, tdStartWayByVal, tdEndWayByVal); NotesErrorUtils.checkResult(result); } /** * This function changes the state of the OOO functionality as indicated by * the <code>enabled</code> variable.<br> * If the OOO functionality is already in the state indicated by the * <code>enabled</code> flag, this function does nothing.<br> * <br> * When you need to enable OOO (i.e. call it with <code>enabled</code> flag set * to TRUE) you should call {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} * prior to calling {@link #setEnabled(boolean)}.<br> * <br> * If {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} is not called, * {@link #setEnabled(boolean)} will use the previous value for start and end.<br> * <br> * If they are in the past then the OOO functionality will not be enabled.<br> * When you need to disable OOO (i.e. 
call it with <code>enabled</code> set to FALSE) * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} does not need to be called.<br> * <br> * When {@link #setEnabled(boolean)} is called with the <code>enabled</code> set * to FALSE it means you want to disable OOO immediately.<br> * If you don’t want to disable OOO functionality immediately, but rather you * just want to change the time when OOO should stop operating, the sequence * of calls is : {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)}, {@link #free()}.<br> * If OOO is configured to run as a service and {@link #setEnabled(boolean)} is * used to disable, the OOO service will be auto-disabled immediately.<br> * <br> * The summary report will be generated on the first email received after the * disable has been requested, or if no messages are received it will * generated during the nightly router maintenance.<br> * <br> * If OOO is configured as an agent, the user will receive a summary report * and a request to disable the agent on the next scheduled run of the agent will occur. * * @param enabled true to enable */ public void setEnabled(boolean enabled) { checkHandle(); short result = NotesNativeAPI.get().OOOEnable(m_pOOOContext, enabled ? 1 : 0); NotesErrorUtils.checkResult(result); } /** * OOO supports two sets of notification messages.<br> * <br> * They are called General message/subject and Special message/subject.<br> * The rest of the people will receive the general message/subject message.<br> * This function sets the general subject.<br> * If this field is not specified in by this API call, the value defined * using Notes Client will be used, otherwise the default for this field * is the following text <i>AUTO: Katherine Smith is out of the office (returning 02/23/2009 10:12:17 AM)</i>. 
* * @param subject string that will appear as the subject line of the OOO notification * @param displayReturnDate Boolean which controls whether (“returning <date>”) appears on the subject line */ public void setGeneralSubject(String subject, boolean displayReturnDate) { checkHandle(); Memory subjectMem = NotesStringUtils.toLMBCS(subject, true); short result = NotesNativeAPI.get().OOOSetGeneralSubject(m_pOOOContext, subjectMem, displayReturnDate ? 1 : 0); NotesErrorUtils.checkResult(result); } /** * OOO supports two sets of notification messages.<br> * They are called General message/subject and Special message/subject.<br> * The following text is always appended to the body of the message, where * the "Message subject" is obtained from the message which caused the * notification to be generated.<br> * <i>"Note: This is an automated response to your message "Message subject" * sent on 2/12/2009 10:12:17 AM. This is the only notification you will receive while this person is away."</i> * * @param msg message, max 65535 bytes LMBCS encoded (WORD datatype for length) */ public void setGeneralMessage(String msg) { checkHandle(); Memory msgMem = NotesStringUtils.toLMBCS(msg, false); if (msgMem.size() > 65535) throw new IllegalArgumentException("Message exceeds max length, "+msgMem.size() + "> 65535 bytes"); short result = NotesNativeAPI.get().OOOSetGeneralMessage(m_pOOOContext, msgMem, (short) (msgMem.size() & 0xffff)); NotesErrorUtils.checkResult(result); } } }
domino-jna/src/main/java/com/mindoo/domino/jna/utils/NotesOOOUtils.java
package com.mindoo.domino.jna.utils; import com.mindoo.domino.jna.NotesDatabase; import com.mindoo.domino.jna.NotesDateRange; import com.mindoo.domino.jna.NotesTimeDate; import com.mindoo.domino.jna.errors.NotesError; import com.mindoo.domino.jna.errors.NotesErrorUtils; import com.mindoo.domino.jna.gc.IAllocatedMemory; import com.mindoo.domino.jna.gc.NotesGC; import com.mindoo.domino.jna.internal.DisposableMemory; import com.mindoo.domino.jna.internal.NotesConstants; import com.mindoo.domino.jna.internal.NotesNativeAPI; import com.mindoo.domino.jna.internal.NotesNativeAPI32; import com.mindoo.domino.jna.internal.NotesNativeAPI64; import com.mindoo.domino.jna.internal.structs.NotesTimeDateStruct; import com.sun.jna.Memory; import com.sun.jna.Pointer; import com.sun.jna.ptr.IntByReference; import com.sun.jna.ptr.LongByReference; import com.sun.jna.ptr.PointerByReference; import com.sun.jna.ptr.ShortByReference; /** * Utility class to read and write Out-Of-Office (OOO) information * for a Dominp user. * * @author Karsten Lehmann */ public class NotesOOOUtils { /** * This function should be called prior to performing any OOO operation.<br> * It initializes values for each specific user.<br> * <br> * When you are finished with the logic of a specific operation you are required * to call OOOEndOperation routine.<br> * For example,to check the state of OOO functionality for a specific user you would * call OOOStartOperation, OOOGetState, OOOEndOperation. 
All strings are LMBCS * strings.<br> * The user is required to have a minimum of Editor level access in the ACL of * their mail file.<br> * <br> * Effeciency Considerations:<br> * For most efficient operation specify all optional parameters (home mail server * and handle to the user’s mail file).<br> * If home mail server is not specified or if the mail file handle is not provided, * this function will look up this information on the server specified in * <code>homeMailServer</code> parameter.<br> * <br> * If that lookup fails it will attempt a look up locally on the server where the * application is running.<br> * <br> * If the second lookup fails and handle to the mail file was provided, then a lookup * on the server where the database is located will be performed. If you would like * to suppress the extra look ups and limit the look up only to the server which * was specified in pMailServer parameter use the following ini variable on the * server where this api/application is running.<br> * <br> * SUPRESS_OOO_DIRECTORY_FAILOVER_LOOKUP = 1<br> * <br> * When multiple lookups are performed it is typically a sign that there is a * configuration problem in the domain and an event indicating this will be logged * to the server console (and DDM).<br> * <br> * This event will be generated 5 or more minutes apart to avoid flooding the server. * * @param mailOwnerName Canonical or abbreviated name of the owner of the mail where we are turning on OOO,Mandatory parameter. * @param homeMailServer Canonical or abbreviated name of the server where the lookup for user information should be made (optional). If the server name is not a home mail server, an attempt will be made to figure out the home mail server by looking first locally and, if configured, in the extended directory. The lookups can be suppressed by providing the server name in <code>homeMailServer</code> parameter and setting the <code>isHomeMailServer</code> parameter to TRUE. 
Suppressing lookups is a more efficient option. * @param isHomeMailServer TRUE if the <code>homeMailServer</code> is user’s home mail(optional). Set it only if you are sure that user’s home mail server was specified. If FALSE the look up for user’s home mail will be performed. * @param dbMail If the application already has the mail file opened they can pass it in for better better efficiency. * @return OOO context to read or write data */ public static NotesOOOContext startOperation(String mailOwnerName, String homeMailServer, boolean isHomeMailServer, NotesDatabase dbMail) { Memory mailOwnerNameMem = NotesStringUtils.toLMBCS(NotesNamingUtils.toCanonicalName(mailOwnerName), true); Memory homeMailServerMem = NotesStringUtils.toLMBCS(NotesNamingUtils.toCanonicalName(homeMailServer), true); PointerByReference pOOOOContext = new PointerByReference(); short result; result = NotesNativeAPI.get().OOOInit(); NotesErrorUtils.checkResult(result); if (PlatformUtils.is64Bit()) { LongByReference hOOOContext = new LongByReference(); result = NotesNativeAPI64.get().OOOStartOperation(mailOwnerNameMem, homeMailServerMem, isHomeMailServer ? 1 : 0, dbMail==null ? 0 : dbMail.getHandle64(), hOOOContext, pOOOOContext); NotesErrorUtils.checkResult(result); NotesOOOContext ctx = new NotesOOOContext(hOOOContext.getValue(), pOOOOContext.getValue()); NotesGC.__memoryAllocated(ctx); return ctx; } else { IntByReference hOOOContext = new IntByReference(); result = NotesNativeAPI32.get().OOOStartOperation(mailOwnerNameMem, homeMailServerMem, isHomeMailServer ? 1 : 0, dbMail==null ? 
0 : dbMail.getHandle32(), hOOOContext, pOOOOContext); NotesErrorUtils.checkResult(result); NotesOOOContext ctx = new NotesOOOContext(hOOOContext.getValue(), pOOOOContext.getValue()); NotesGC.__memoryAllocated(ctx); return ctx; } } public static class NotesOOOContext implements IAllocatedMemory { public static enum OOOType {AGENT, SERVICE} private int m_hOOOContext32; private long m_hOOOContext64; private Pointer m_pOOOContext; private NotesOOOContext(int hOOOContext, Pointer pOOOContext) { if (PlatformUtils.is64Bit()) throw new NotesError(0, "Constructor is 32-bit only"); m_hOOOContext32 = hOOOContext; m_pOOOContext = pOOOContext; } private NotesOOOContext(long hOOOContext, Pointer pOOOContext) { if (!PlatformUtils.is64Bit()) throw new NotesError(0, "Constructor is 64-bit only"); m_hOOOContext64 = hOOOContext; m_pOOOContext = pOOOContext; } @Override public void free() { if (isFreed()) return; if (PlatformUtils.is64Bit()) { short result = NotesNativeAPI64.get().OOOEndOperation(m_hOOOContext64, m_pOOOContext); NotesErrorUtils.checkResult(result); m_hOOOContext64 = 0; } else { short result = NotesNativeAPI32.get().OOOEndOperation(m_hOOOContext32, m_pOOOContext); NotesErrorUtils.checkResult(result); m_hOOOContext32 = 0; } } @Override public boolean isFreed() { return PlatformUtils.is64Bit() ? m_hOOOContext64==0 : m_hOOOContext32==0; } @Override public int getHandle32() { return m_hOOOContext32; } @Override public long getHandle64() { return m_hOOOContext64; } private void checkHandle() { if (PlatformUtils.is64Bit()) { if (m_hOOOContext64==0) throw new NotesError(0, "OOO context already recycled"); NotesGC.__b64_checkValidMemHandle(NotesOOOContext.class, m_hOOOContext64); } else { if (m_hOOOContext32==0) throw new NotesError(0, "OOO context already recycled"); NotesGC.__b32_checkValidMemHandle(NotesOOOContext.class, m_hOOOContext32); } } /** * This function returns time parameters that control OOO. 
* * @return away period */ public NotesDateRange getAwayPeriod() { checkHandle(); NotesTimeDateStruct tdStartAwayStruct = NotesTimeDateStruct.newInstance(); NotesTimeDateStruct tdEndAwayStruct = NotesTimeDateStruct.newInstance(); short result = NotesNativeAPI.get().OOOGetAwayPeriod(m_pOOOContext, tdStartAwayStruct, tdEndAwayStruct); NotesErrorUtils.checkResult(result); NotesTimeDate tdStartAway = new NotesTimeDate(tdStartAwayStruct); NotesTimeDate tdEndAway = new NotesTimeDate(tdEndAwayStruct); return new NotesDateRange(tdStartAway, tdEndAway); } /** * This function returns a flag which defines how to treat internet emails.<br> * This functional call is optional.<br> * If this flag is set to TRUE OOO notifications will not be generated for<br> * email originating from the internet. The default for this flag is TRUE. * * @return true if excluded */ public boolean isExcludeInternet() { checkHandle(); IntByReference bExcludeInternet = new IntByReference(); short result = NotesNativeAPI.get().OOOGetExcludeInternet(m_pOOOContext, bExcludeInternet); NotesErrorUtils.checkResult(result); return bExcludeInternet.getValue()==1; } /** * Convenience method to check whether the OOO functionality is enabled. Calls * {@link #getState(Ref, Ref)} internally. * * @return true if enabled */ public boolean isEnabled() { Ref<Boolean> retIsEnabled = new Ref<Boolean>(); getState(null, retIsEnabled); return Boolean.TRUE.equals(retIsEnabled.get()); } /** * Convenience method to read which kind of OOO system is used (agent or service). * Calls {@link #getState(Ref, Ref)} internally. 
* * @return type */ public OOOType getType() { Ref<OOOType> retType = new Ref<OOOType>(); getState(retType, null); return retType.get(); } /** * This function returns the version (agent, service) and the state (disabled, enabled) * of the out of office functionality.<br> * The version information can be used to show or hide UI elements that might not be * supported for a given version.<br> * For example, the agent does not support durations of less than 1 day and some * clients might choose not to show the hours in the user interface.<br> * When you need to make {@link #getState(Ref, Ref)} as efficient as possible, call * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)} * with the home mail server and the opened mail database.<br> * This function is read only and does not return an error if user ACL rights * are below Editor (which are required to turn on/off the Out of office functionality).<br> * If {@link #getState(Ref, Ref)} is called immediately following OOOEnable it will * not reflect the state set by the OOOEnable.<br> * To see the current state call {@link #free()} and start a new operation using * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link NotesOOOContext#getState(Ref, Ref)} and {@link #free()}. 
* @param retType returns the type of the OOO system (agent or service) * @param retIsEnabled returns whether the service is enabled for the user */ public void getState(Ref<OOOType> retType, Ref<Boolean> retIsEnabled) { checkHandle(); ShortByReference retVersion = new ShortByReference(); ShortByReference retState = new ShortByReference(); short result = NotesNativeAPI.get().OOOGetState(m_pOOOContext, retVersion, retState); NotesErrorUtils.checkResult(result); if (retType!=null) { if (retVersion.getValue() == 1) { retType.set(OOOType.AGENT); } else if (retVersion.getValue() == 2) { retType.set(OOOType.SERVICE); } } if (retIsEnabled!=null) { if (retState.getValue()==1) { retIsEnabled.set(Boolean.TRUE); } else { retIsEnabled.set(Boolean.FALSE); } } } /** * OOO supports two sets of messages.<br> * <br> * They are called General message/subject and Special message/subject.<br> * This function gets the general subject.<br> * This is string that will appear as the subject line of the OOO notification. * * @return subject */ public String getGeneralSubject() { checkHandle(); DisposableMemory retSubject = new DisposableMemory(NotesConstants.OOOPROF_MAX_BODY_SIZE); try { short result = NotesNativeAPI.get().OOOGetGeneralSubject(m_pOOOContext, retSubject); NotesErrorUtils.checkResult(result); String subject = NotesStringUtils.fromLMBCS(retSubject, -1); return subject; } finally { retSubject.dispose(); } } /** * OOO supports two sets of messages. They are called General message/subject and * Special message/subject.<br> * This function returns the text of the general message. 
* * @return message */ public String getGeneralMessage() { checkHandle(); //first get the length ShortByReference retGeneralMessageLen = new ShortByReference(); short result = NotesNativeAPI.get().OOOGetGeneralMessage(m_pOOOContext, null, retGeneralMessageLen); NotesErrorUtils.checkResult(result); int iGeneralMessageLen = (int) (retGeneralMessageLen.getValue() & 0xffff); if (iGeneralMessageLen==0) return ""; DisposableMemory retMessage = new DisposableMemory(iGeneralMessageLen + 1); try { result = NotesNativeAPI.get().OOOGetGeneralMessage(m_pOOOContext, retMessage, retGeneralMessageLen); NotesErrorUtils.checkResult(result); String msg = NotesStringUtils.fromLMBCS(retMessage, retGeneralMessageLen.getValue()); return msg; } finally { retMessage.dispose(); } } /** * This function validates and sets the time parameters that control OOO.<br> * <br> * This information is required for enabling the OOO.<br> * If you want turn on OOO functionality for a given period of time the * sequence of calls needed is:<br> * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)}, {@link #setEnabled(boolean)} * and {@link #free()}.<br> * <br> * When you need to enable OOO (i.e. call it with <code>enabled</code> flag set to TRUE) * you should call {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} prior to calling * {@link #setEnabled(boolean)}.<br> * <br> * If you need to change the length of the away period after OOO has already been * enabled, the sequence of calls needed to perform this action is * {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)}, {@link #free()}.<br> * <br> * If the Domino server is configured to run an OOO agent, it can only be turned on * for full days, the time portion of the date parameter will not be used. * * @param tdStartAway This is date and time when Out of office will begin. 
* @param tdEndAway This is date and time when Out of office will end. */ public void setAwayPeriod(NotesTimeDate tdStartAway, NotesTimeDate tdEndAway) { checkHandle(); NotesTimeDateStruct.ByValue tdStartWayByVal = NotesTimeDateStruct.ByValue.newInstance(tdStartAway.getInnards()); NotesTimeDateStruct.ByValue tdEndWayByVal = NotesTimeDateStruct.ByValue.newInstance(tdEndAway.getInnards()); short result = NotesNativeAPI.get().OOOSetAwayPeriod(m_pOOOContext, tdStartWayByVal, tdEndWayByVal); NotesErrorUtils.checkResult(result); } /** * This function changes the state of the OOO functionality as indicated by * the <code>enabled</code> variable.<br> * If the OOO functionality is already in the state indicated by the * <code>enabled</code> flag, this function does nothing.<br> * <br> * When you need to enable OOO (i.e. call it with <code>enabled</code> flag set * to TRUE) you should call {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} * prior to calling {@link #setEnabled(boolean)}.<br> * <br> * If {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} is not called, * {@link #setEnabled(boolean)} will use the previous value for start and end.<br> * <br> * If they are in the past then the OOO functionality will not be enabled.<br> * When you need to disable OOO (i.e. 
call it with <code>enabled</code> set to FALSE) * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)} does not need to be called.<br> * <br> * When {@link #setEnabled(boolean)} is called with the <code>enabled</code> set * to FALSE it means you want to disable OOO immediately.<br> * If you don’t want to disable OOO functionality immediately, but rather you * just want to change the time when OOO should stop operating, the sequence * of calls is : {@link NotesOOOUtils#startOperation(String, String, boolean, NotesDatabase)}, * {@link #setAwayPeriod(NotesTimeDate, NotesTimeDate)}, {@link #free()}.<br> * If OOO is configured to run as a service and {@link #setEnabled(boolean)} is * used to disable, the OOO service will be auto-disabled immediately.<br> * <br> * The summary report will be generated on the first email received after the * disable has been requested, or if no messages are received it will * generated during the nightly router maintenance.<br> * <br> * If OOO is configured as an agent, the user will receive a summary report * and a request to disable the agent on the next scheduled run of the agent will occur. * * @param enabled true to enable */ public void setEnabled(boolean enabled) { checkHandle(); short result = NotesNativeAPI.get().OOOEnable(m_pOOOContext, enabled ? 1 : 0); NotesErrorUtils.checkResult(result); } /** * OOO supports two sets of notification messages.<br> * <br> * They are called General message/subject and Special message/subject.<br> * The rest of the people will receive the general message/subject message.<br> * This function sets the general subject.<br> * If this field is not specified in by this API call, the value defined * using Notes Client will be used, otherwise the default for this field * is the following text <i>AUTO: Katherine Smith is out of the office (returning 02/23/2009 10:12:17 AM)</i>. 
* * @param subject string that will appear as the subject line of the OOO notification * @param displayReturnDate Boolean which controls whether (“returning <date>”) appears on the subject line */ public void setGeneralSubject(String subject, boolean displayReturnDate) { checkHandle(); Memory subjectMem = NotesStringUtils.toLMBCS(subject, true); short result = NotesNativeAPI.get().OOOSetGeneralSubject(m_pOOOContext, subjectMem, displayReturnDate ? 1 : 0); NotesErrorUtils.checkResult(result); } /** * OOO supports two sets of notification messages.<br> * They are called General message/subject and Special message/subject.<br> * The following text is always appended to the body of the message, where * the "Message subject" is obtained from the message which caused the * notification to be generated.<br> * <i>"Note: This is an automated response to your message "Message subject" * sent on 2/12/2009 10:12:17 AM. This is the only notification you will receive while this person is away."</i> * * @param msg message, max 65535 bytes LMBCS encoded (WORD datatype for length) */ public void setGeneralMessage(String msg) { checkHandle(); Memory msgMem = NotesStringUtils.toLMBCS(msg, false); if (msgMem.size() > 65535) throw new IllegalArgumentException("Message exceeds max length, "+msgMem.size() + "> 65535 bytes"); short result = NotesNativeAPI.get().OOOSetGeneralMessage(m_pOOOContext, msgMem, (short) (msgMem.size() & 0xffff)); NotesErrorUtils.checkResult(result); } } }
Added OOO method setExcludeInternet(boolean exclude)
domino-jna/src/main/java/com/mindoo/domino/jna/utils/NotesOOOUtils.java
Added OOO method setExcludeInternet(boolean exclude)
Java
apache-2.0
24266544672bfd193ee254dd78a7bbcf290550c5
0
eayun/ovirt-engine,walteryang47/ovirt-engine,halober/ovirt-engine,zerodengxinchao/ovirt-engine,eayun/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine,yapengsong/ovirt-engine,zerodengxinchao/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,halober/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,OpenUniversity/ovirt-engine,eayun/ovirt-engine,walteryang47/ovirt-engine,halober/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,halober/ovirt-engine,zerodengxinchao/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,yapengsong/ovirt-engine
package org.ovirt.engine.core.bll.eventqueue; import java.util.LinkedList; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; import java.util.concurrent.locks.ReentrantLock; import javax.ejb.ConcurrencyManagement; import javax.ejb.ConcurrencyManagementType; import javax.ejb.Local; import javax.ejb.Singleton; import javax.ejb.TransactionAttribute; import javax.ejb.TransactionAttributeType; import org.ovirt.engine.core.common.eventqueue.Event; import org.ovirt.engine.core.common.eventqueue.EventQueue; import org.ovirt.engine.core.common.eventqueue.EventResult; import org.ovirt.engine.core.common.eventqueue.EventType; import org.ovirt.engine.core.common.utils.Pair; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.utils.log.Log; import org.ovirt.engine.core.utils.log.LogFactory; import org.ovirt.engine.core.utils.threadpool.ThreadPoolUtil; @Singleton(name = "EventQueue") @ConcurrencyManagement(ConcurrencyManagementType.BEAN) @TransactionAttribute(TransactionAttributeType.SUPPORTS) @Local(EventQueue.class) public class EventQueueMonitor implements EventQueue { private static Log log = LogFactory.getLog(EventQueueMonitor.class); private static final ConcurrentMap<Guid, ReentrantLock> poolsLockMap = new ConcurrentHashMap<Guid, ReentrantLock>(); private static final Map<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>> poolsEventsMap = new ConcurrentHashMap<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>>(); private static final Map<Guid, Event> poolCurrentEventMap = new ConcurrentHashMap<Guid, Event>(); @Override public void submitEventAsync(Event event, Callable<EventResult> callable) { submitTaskInternal(event, callable); } @Override public EventResult submitEventSync(Event event, Callable<EventResult> callable) { FutureTask<EventResult> task = 
submitTaskInternal(event, callable); if (task != null) { try { return task.get(); } catch (Exception e) { log.errorFormat("Failed at submitEventSync, for pool {0} with exception {1}", event.getStoragePoolId(), e); } } return null; } private FutureTask<EventResult> submitTaskInternal(Event event, Callable<EventResult> callable) { FutureTask<EventResult> task = null; Guid storagePoolId = event.getStoragePoolId(); ReentrantLock lock = getPoolLock(storagePoolId); lock.lock(); try { Event currentEvent = poolCurrentEventMap.get(storagePoolId); if (currentEvent != null) { switch (currentEvent.getEventType()) { case RECONSTRUCT: if (event.getEventType() == EventType.VDSCONNECTTOPOOL || event.getEventType() == EventType.RECOVERY) { task = addTaskToQueue(event, callable, storagePoolId, isEventShouldBeFirst(event)); } else { log.debugFormat("Current event was skiped because of reconstruct is running now for pool {0}, event {1}", storagePoolId, event); } break; default: task = addTaskToQueue(event, callable, storagePoolId, isEventShouldBeFirst(event)); break; } } else { task = addTaskToQueue(event, callable, storagePoolId, false); poolCurrentEventMap.put(storagePoolId, event); ThreadPoolUtil.execute(new InternalEventQueueThread(storagePoolId, lock, poolsEventsMap, poolCurrentEventMap)); } } finally { lock.unlock(); } return task; } /** * The following method should decide if we want that the event will be first for executing, before all other events * already submitted to queue * @param event * - submitted event * @return */ private boolean isEventShouldBeFirst(Event event) { return event.getEventType() == EventType.RECOVERY; } private FutureTask<EventResult> addTaskToQueue(Event event, Callable<EventResult> callable, Guid storagePoolId, boolean addFirst) { FutureTask<EventResult> task = new FutureTask<EventResult>(callable); Pair<Event, FutureTask<EventResult>> queueEvent = new Pair<Event, FutureTask<EventResult>>(event, task); if (addFirst) { 
getEventQueue(storagePoolId).addFirst(queueEvent); } else { getEventQueue(storagePoolId).add(queueEvent); } return task; } private LinkedList<Pair<Event, FutureTask<EventResult>>> getEventQueue(Guid storagePoolId) { LinkedList<Pair<Event, FutureTask<EventResult>>> queue = poolsEventsMap.get(storagePoolId); if (queue == null) { queue = new LinkedList<Pair<Event, FutureTask<EventResult>>>(); poolsEventsMap.put(storagePoolId, queue); } return queue; } private ReentrantLock getPoolLock(Guid poolId) { if (!poolsLockMap.containsKey(poolId)) { poolsLockMap.putIfAbsent(poolId, new ReentrantLock()); } return poolsLockMap.get(poolId); } private static class InternalEventQueueThread implements Runnable { private Guid storagePoolId; private ReentrantLock lock; private Map<Guid, Event> poolCurrentEventMap; private Map<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>> poolsEventsMap; public InternalEventQueueThread(Guid storagePoolId, ReentrantLock lock, Map<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>> poolsEventsMap, Map<Guid, Event> poolCurrentEventMap) { this.storagePoolId = storagePoolId; this.lock = lock; this.poolsEventsMap = poolsEventsMap; this.poolCurrentEventMap = poolCurrentEventMap; } @Override public void run() { while (true) { Pair<Event, FutureTask<EventResult>> pair; lock.lock(); try { pair = poolsEventsMap.get(storagePoolId).poll(); if (pair != null) { poolCurrentEventMap.put(storagePoolId, pair.getFirst()); } else { poolCurrentEventMap.remove(storagePoolId); poolsEventsMap.remove(storagePoolId); log.debugFormat("All task for event query were executed pool {0}", storagePoolId); break; } } finally { lock.unlock(); } Future<EventResult> futureResult = ThreadPoolUtil.execute(pair.getSecond()); try { if (futureResult.get() == null) { EventResult result = pair.getSecond().get(); if (result != null && result.getEventType() == EventType.RECONSTRUCT) { log.infoFormat("Finished reconstruct for pool {0}. 
Clearing event queue", storagePoolId); lock.lock(); try { LinkedList<Pair<Event, FutureTask<EventResult>>> queue = new LinkedList<Pair<Event, FutureTask<EventResult>>>(); for (Pair<Event, FutureTask<EventResult>> task : poolsEventsMap.get(storagePoolId)) { EventType eventType = task.getFirst().getEventType(); if (eventType == EventType.VDSCONNECTTOPOOL || (eventType == EventType.RECOVERY && !result.isSuccess())) { queue.add(task); } else { log.infoFormat("The following operation {0} was cancelled, because of recosntruct was run before", task.getFirst()); task.getSecond().cancel(true); } } if (queue.isEmpty()) { poolCurrentEventMap.remove(storagePoolId); poolsEventsMap.remove(storagePoolId); break; } else { poolsEventsMap.put(storagePoolId, queue); } } finally { lock.unlock(); } } } } catch (Exception e) { log.errorFormat("Exception during process of events for pool {0}, error is {1}", storagePoolId, e.getMessage()); } } } } }
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/eventqueue/EventQueueMonitor.java
package org.ovirt.engine.core.bll.eventqueue; import java.util.HashMap; import java.util.LinkedList; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; import java.util.concurrent.locks.ReentrantLock; import javax.ejb.ConcurrencyManagement; import javax.ejb.ConcurrencyManagementType; import javax.ejb.Local; import javax.ejb.Singleton; import javax.ejb.TransactionAttribute; import javax.ejb.TransactionAttributeType; import org.ovirt.engine.core.common.eventqueue.Event; import org.ovirt.engine.core.common.eventqueue.EventQueue; import org.ovirt.engine.core.common.eventqueue.EventResult; import org.ovirt.engine.core.common.eventqueue.EventType; import org.ovirt.engine.core.common.utils.Pair; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.utils.log.Log; import org.ovirt.engine.core.utils.log.LogFactory; import org.ovirt.engine.core.utils.threadpool.ThreadPoolUtil; @Singleton(name = "EventQueue") @ConcurrencyManagement(ConcurrencyManagementType.BEAN) @TransactionAttribute(TransactionAttributeType.SUPPORTS) @Local(EventQueue.class) public class EventQueueMonitor implements EventQueue { private static Log log = LogFactory.getLog(EventQueueMonitor.class); private static final ConcurrentMap<Guid, ReentrantLock> poolsLockMap = new ConcurrentHashMap<Guid, ReentrantLock>(); private static final Map<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>> poolsEventsMap = new HashMap<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>>(); private static final Map<Guid, Event> poolCurrentEventMap = new HashMap<Guid, Event>(); @Override public void submitEventAsync(Event event, Callable<EventResult> callable) { submitTaskInternal(event, callable); } @Override public EventResult submitEventSync(Event event, Callable<EventResult> callable) { FutureTask<EventResult> task = 
submitTaskInternal(event, callable); if (task != null) { try { return task.get(); } catch (Exception e) { log.errorFormat("Failed at submitEventSync, for pool {0} with exception {1}", event.getStoragePoolId(), e); } } return null; } private FutureTask<EventResult> submitTaskInternal(Event event, Callable<EventResult> callable) { FutureTask<EventResult> task = null; Guid storagePoolId = event.getStoragePoolId(); ReentrantLock lock = getPoolLock(storagePoolId); lock.lock(); try { Event currentEvent = poolCurrentEventMap.get(storagePoolId); if (currentEvent != null) { switch (currentEvent.getEventType()) { case RECONSTRUCT: if (event.getEventType() == EventType.VDSCONNECTTOPOOL || event.getEventType() == EventType.RECOVERY) { task = addTaskToQueue(event, callable, storagePoolId, isEventShouldBeFirst(event)); } else { log.debugFormat("Current event was skiped because of reconstruct is running now for pool {0}, event {1}", storagePoolId, event); } break; default: task = addTaskToQueue(event, callable, storagePoolId, isEventShouldBeFirst(event)); break; } } else { task = addTaskToQueue(event, callable, storagePoolId, false); poolCurrentEventMap.put(storagePoolId, event); ThreadPoolUtil.execute(new InternalEventQueueThread(storagePoolId, lock, poolsEventsMap, poolCurrentEventMap)); } } finally { lock.unlock(); } return task; } /** * The following method should decide if we want that the event will be first for executing, before all other events * already submitted to queue * @param event * - submitted event * @return */ private boolean isEventShouldBeFirst(Event event) { return event.getEventType() == EventType.RECOVERY; } private FutureTask<EventResult> addTaskToQueue(Event event, Callable<EventResult> callable, Guid storagePoolId, boolean addFirst) { FutureTask<EventResult> task = new FutureTask<EventResult>(callable); Pair<Event, FutureTask<EventResult>> queueEvent = new Pair<Event, FutureTask<EventResult>>(event, task); if (addFirst) { 
getEventQueue(storagePoolId).addFirst(queueEvent); } else { getEventQueue(storagePoolId).add(queueEvent); } return task; } private LinkedList<Pair<Event, FutureTask<EventResult>>> getEventQueue(Guid storagePoolId) { LinkedList<Pair<Event, FutureTask<EventResult>>> queue = poolsEventsMap.get(storagePoolId); if (queue == null) { queue = new LinkedList<Pair<Event, FutureTask<EventResult>>>(); poolsEventsMap.put(storagePoolId, queue); } return queue; } private ReentrantLock getPoolLock(Guid poolId) { if (!poolsLockMap.containsKey(poolId)) { poolsLockMap.putIfAbsent(poolId, new ReentrantLock()); } return poolsLockMap.get(poolId); } private static class InternalEventQueueThread implements Runnable { private Guid storagePoolId; private ReentrantLock lock; private Map<Guid, Event> poolCurrentEventMap; private Map<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>> poolsEventsMap; public InternalEventQueueThread(Guid storagePoolId, ReentrantLock lock, Map<Guid, LinkedList<Pair<Event, FutureTask<EventResult>>>> poolsEventsMap, Map<Guid, Event> poolCurrentEventMap) { this.storagePoolId = storagePoolId; this.lock = lock; this.poolsEventsMap = poolsEventsMap; this.poolCurrentEventMap = poolCurrentEventMap; } @Override public void run() { while (true) { Pair<Event, FutureTask<EventResult>> pair; lock.lock(); try { pair = poolsEventsMap.get(storagePoolId).poll(); if (pair != null) { poolCurrentEventMap.put(storagePoolId, pair.getFirst()); } else { poolCurrentEventMap.remove(storagePoolId); poolsEventsMap.remove(storagePoolId); log.debugFormat("All task for event query were executed pool {0}", storagePoolId); break; } } finally { lock.unlock(); } Future<EventResult> futureResult = ThreadPoolUtil.execute(pair.getSecond()); try { if (futureResult.get() == null) { EventResult result = pair.getSecond().get(); if (result != null && result.getEventType() == EventType.RECONSTRUCT) { log.infoFormat("Finished reconstruct for pool {0}. 
Clearing event queue", storagePoolId); lock.lock(); try { LinkedList<Pair<Event, FutureTask<EventResult>>> queue = new LinkedList<Pair<Event, FutureTask<EventResult>>>(); for (Pair<Event, FutureTask<EventResult>> task : poolsEventsMap.get(storagePoolId)) { EventType eventType = task.getFirst().getEventType(); if (eventType == EventType.VDSCONNECTTOPOOL || (eventType == EventType.RECOVERY && !result.isSuccess())) { queue.add(task); } else { log.infoFormat("The following operation {0} was cancelled, because of recosntruct was run before", task.getFirst()); task.getSecond().cancel(true); } } if (queue.isEmpty()) { poolCurrentEventMap.remove(storagePoolId); poolsEventsMap.remove(storagePoolId); break; } else { poolsEventsMap.put(storagePoolId, queue); } } finally { lock.unlock(); } } } } catch (Exception e) { log.errorFormat("Exception during process of events for pool {0}, error is {1}", storagePoolId, e.getMessage()); } } } } }
engine: Fixing possible ConcurrentModificationException at EventQueueMonitor The following exception can ocurred during change of shared data for different pools. Fixing it Change-Id: I5898a7783ae8246de982c2c74b43352887026886 Signed-off-by: Michael Kublin <[email protected]>
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/eventqueue/EventQueueMonitor.java
engine: Fixing possible ConcurrentModificationException at EventQueueMonitor
Java
apache-2.0
6637ad13043e2a3efbd0a1c18794823c19de1877
0
baomidou/mybatis-plus,baomidou/mybatis-plus
package com.baomidou.mybatisplus.generator.config.querys; /** * <p>Oscar(神通数据库) 表数据查询</p> * * @author whli * @version 1.0.0 * @since 2020/7/28 18:54 */ public class OscarQuery extends AbstractDbQuery { @Override public String tablesSql() { return "SELECT * FROM (SELECT " + "a.TABLE_NAME, " + "b.COMMENTS " + "FROM USER_TABLES a " + "INNER JOIN USER_TAB_COMMENTS b ON (b.TABLE_TYPE = 'TABLE' AND a.TABLE_NAME = b.TABLE_NAME)) a WHERE 1=1 "; } @Override public String tableFieldsSql() { return "SELECT " + "T1.COLUMN_NAME, " + "T1.DATA_TYPE, " + "T2.COMMENTS, " + "CASE WHEN T3.CONSTRAINT_TYPE = 'P' THEN 'PRI' " + "ELSE '' END KEY " + "FROM USER_TAB_COLUMNS T1 " + "INNER JOIN USER_COL_COMMENTS T2 ON (T1.COLUMN_NAME = T2.COLUMN_NAME) " + "LEFT JOIN(SELECT a.TABLE_NAME,b.COLUMN_NAME,a.CONSTRAINT_TYPE FROM USER_CONSTRAINTS a, USER_IND_COLUMNS b " + "WHERE a.CONSTRAINT_TYPE = 'P' AND a.INDEX_NAME = b.INDEX_NAME) T3 ON (T1.TABLE_NAME = T3.TABLE_NAME AND T1.COLUMN_NAME = T3.COLUMN_NAME) " + "WHERE T1.TABLE_NAME = '%s' " + "GROUP BY T1.COLUMN_NAME,T1.DATA_TYPE,T2.COMMENTS,T3.CONSTRAINT_TYPE "; } @Override public String tableName() { return "TABLE_NAME"; } @Override public String tableComment() { return "COMMENTS"; } @Override public String fieldName() { return "COLUMN_NAME"; } @Override public String fieldType() { return "DATA_TYPE"; } @Override public String fieldComment() { return "COMMENTS"; } @Override public String fieldKey() { return "KEY"; } }
mybatis-plus-generator/src/main/java/com/baomidou/mybatisplus/generator/config/querys/OscarQuery.java
package com.baomidou.mybatisplus.generator.config.querys; /** * <p></p> * * @author whli * @version 1.0.0 * @since 2020/7/28 18:54 */ public class OscarQuery extends AbstractDbQuery { @Override public String tablesSql() { return "SELECT " + "a.TABLE_NAME, " + "b.COMMENTS " + "FROM USER_TABLES a " + "INNER JOIN USER_TAB_COMMENTS b ON (b.TABLE_TYPE = 'TABLE' AND a.TABLE_NAME = b.TABLE_NAME) "; } @Override public String tableFieldsSql() { return "SELECT " + "T1.COLUMN_NAME, " + "T1.DATA_TYPE, " + "T2.COMMENTS, " + "CASE WHEN T3.CONSTRAINT_TYPE = 'P' THEN 'PRI' " + "ELSE '' END KEY " + "FROM USER_TAB_COLUMNS T1 " + "INNER JOIN USER_COL_COMMENTS T2 ON (T1.COLUMN_NAME = T2.COLUMN_NAME) " + "LEFT JOIN(SELECT a.TABLE_NAME,b.COLUMN_NAME,a.CONSTRAINT_TYPE FROM USER_CONSTRAINTS a, USER_IND_COLUMNS b " + "WHERE a.CONSTRAINT_TYPE = 'P' AND a.INDEX_NAME = b.INDEX_NAME) T3 ON (T1.TABLE_NAME = T3.TABLE_NAME AND T1.COLUMN_NAME = T3.COLUMN_NAME) " + "WHERE T1.TABLE_NAME = '%s' " + "GROUP BY T1.COLUMN_NAME,T1.DATA_TYPE,T2.COMMENTS,T3.CONSTRAINT_TYPE "; } @Override public String tableName() { return "TABLE_NAME"; } @Override public String tableComment() { return "COMMENTS"; } @Override public String fieldName() { return "COLUMN_NAME"; } @Override public String fieldType() { return "DATA_TYPE"; } @Override public String fieldComment() { return "COMMENTS"; } @Override public String fieldKey() { return "KEY"; } }
修复Oscar(神通数据库)生成错误.
mybatis-plus-generator/src/main/java/com/baomidou/mybatisplus/generator/config/querys/OscarQuery.java
修复Oscar(神通数据库)生成错误.
Java
apache-2.0
24d70beda4579ddc2989829c92a7cdbfe7e8c32f
0
rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny
package org.rabix.tests; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.io.FileUtils; import org.rabix.common.helper.JSONHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TestRunner { private static String testDirPath; private static String cmdPrefix; private static String resultPath = "./rabix-backend-local/target/result.yaml"; private static String workingdir = "./rabix-backend-local/target/"; private static final Logger logger = LoggerFactory.getLogger(TestRunner.class); public static void main(String[] commandLineArguments) { try { logger.info("Testing started..."); PropertiesConfiguration configuration = getConfig(); testDirPath = getStringFromConfig(configuration, "testDirPath"); cmdPrefix = getStringFromConfig(configuration, "cmdPrefix"); startTestExecution(); } catch (RabixTestException e) { logger.error("Error occuerred!", e); System.exit(-1); } } private static void startTestExecution() throws RabixTestException { boolean allTestsPassed = true; boolean testPassed = false; File dir = new File(testDirPath); File[] directoryListing = dir.listFiles(); ArrayList<Object> failedTests = new ArrayList<Object>(); if (!dir.isDirectory()) { logger.error("Problem with test directory path: Test directory path is not valid directory path."); System.exit(-1); } if (directoryListing == null) { logger.error("Problem with provided test directory: Test directory is empty."); } logger.info("Extracting jar file"); command("tar -zxvf " + System.getProperty("user.dir") + 
"/rabix-backend-local/target/rabix-backend-local-0.6.1-SNAPSHOT-id3.tar.gz", workingdir); command("cp -a " + System.getProperty("user.dir") + "/rabix-integration-testing/testbacklog .", workingdir); for (File child : directoryListing) { if (!child.getPath().endsWith(".test.yaml")) continue; try { String currentTest = readFile(child.getAbsolutePath(), Charset.defaultCharset()); Map<String, Object> inputSuite = JSONHelper.readMap(JSONHelper.transformToJSON(currentTest)); Iterator entries = inputSuite.entrySet().iterator(); while (entries.hasNext()) { Entry thisEntry = (Entry) entries.next(); Object testName = thisEntry.getKey(); Object test = thisEntry.getValue(); logger.info("Running test: " + testName + " with given parameters:"); @SuppressWarnings({ "rawtypes", "unchecked" }) Map<String, Map<String, LinkedHashMap>> mapTest = (Map<String, Map<String, LinkedHashMap>>) test; logger.info(" app: " + mapTest.get("app")); logger.info(" inputs: " + mapTest.get("inputs")); logger.info(" expected: " + mapTest.get("expected")); String cmd = cmdPrefix + " " + mapTest.get("app") + " " + mapTest.get("inputs") + " > result.yaml"; logger.info("->Running cmd: " + cmd); command(cmd, workingdir); File resultFile = new File(resultPath); String resultText = readFile(resultFile.getAbsolutePath(), Charset.defaultCharset()); Map<String, Object> resultData = JSONHelper.readMap(JSONHelper.transformToJSON(resultText)); logger.info("\nGenerated result file:"); logger.info(resultText); testPassed = validateTestCase(mapTest, resultData); logger.info("Test result: "); if (testPassed) { logger.info(testName + " PASSED"); } else { logger.info(testName + " FAILED"); failedTests.add(testName); allTestsPassed = false; } } } catch (IOException e) { logger.error("Test suite execution failed. 
", e); System.exit(-1); } } if (allTestsPassed) { logger.info("Test suite passed successfully."); } else { logger.info("Test suite failed."); logger.info("Failed test number: " + failedTests.size()); logger.info("Failed tests:"); for (Object test : failedTests) { logger.info(test.toString()); } } } private static boolean validateTestCase(Map<String, Map<String, LinkedHashMap>> mapTest, Map<String, Object> resultData) { String resultFileName; int resultFileSize; String resultFileClass; Map<String, Object> resultValues = ((Map<String, Object>) resultData.get("outfile")); resultFileName = resultValues.get("path").toString(); resultFileName = resultFileName.split("/")[resultFileName.split("/").length - 1]; resultFileSize = (int) resultValues.get("size"); resultFileClass = resultValues.get("class").toString(); logger.info("Test validation:"); logger.info("result file name: " + resultFileName + ", expected file name: " + mapTest.get("expected").get("outfile").get("name")); logger.info("result file size: " + resultFileSize + ", expected file size: " + mapTest.get("expected").get("outfile").get("size")); logger.info("result file class: " + resultFileClass + ", expected file class: " + mapTest.get("expected").get("outfile").get("class")); boolean fileNamesEqual = resultFileName.equals(mapTest.get("expected").get("outfile").get("name")); boolean fileSizesEqual = resultFileSize == (int) mapTest.get("expected").get("outfile").get("size"); boolean fileClassesEqual = resultFileClass.equals(mapTest.get("expected").get("outfile").get("class")); if (!fileNamesEqual) { logger.error("result and expected file name are not equal!"); } else { if (!fileSizesEqual) { logger.error("result and expected file size are not equal!"); } else { if (!fileClassesEqual) { logger.error("result and expected file class are not equal!"); } else { logger.info("Test case passed."); return true; } } } return false; } public static void command(final String cmdline, final String directory) throws 
RabixTestException { try { Process process = new ProcessBuilder(new String[] { "bash", "-c", cmdline }).inheritIO() .directory(new File(directory)).start(); BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream())); String line = null; while ((line = br.readLine()) != null) logger.info(line); int exitCode = process.waitFor(); if (0 != exitCode) { throw new RabixTestException("Error while executing command: Non zero exit code " + exitCode); } } catch (Exception e) { logger.error("Error while executing command. ", e); throw new RabixTestException("Error while executing command: " + e.getMessage()); } } /** * Reads content from a file */ static String readFile(String path, Charset encoding) throws IOException { byte[] encoded = Files.readAllBytes(Paths.get(path)); return new String(encoded, encoding); } @SuppressWarnings("unchecked") private static PropertiesConfiguration getConfig() throws RabixTestException { PropertiesConfiguration configuration = new PropertiesConfiguration(); String userDir = System.getProperty("user.dir"); if (userDir == null) { throw new RabixTestException("null value for user.dir property"); } File configDir = new File(userDir + "/rabix-integration-testing/config/test"); try { Iterator<File> iterator = FileUtils.iterateFiles(configDir, new String[] { "properties" }, true); while (iterator.hasNext()) { File configFile = iterator.next(); configuration.load(configFile); } return configuration; } catch (ConfigurationException e) { logger.error("Failed to load configuration properties", e); throw new RabixTestException("Failed to load configuration properties"); } } private static String getStringFromConfig(PropertiesConfiguration configuration, String key) { return configuration.getString(key); } }
rabix-integration-testing/src/main/java/org/rabix/tests/TestRunner.java
package org.rabix.tests; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.io.FileUtils; import org.rabix.common.helper.JSONHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TestRunner { private static String testDirPath; private static String cmdPrefix; private static String resultPath = "./rabix-backend-local/target/result.yaml"; private static String workingdir = "./rabix-backend-local/target/"; private static final Logger logger = LoggerFactory.getLogger(TestRunner.class); public static void main(String[] commandLineArguments) { try { logger.info("Testing started..."); PropertiesConfiguration configuration = getConfig(); testDirPath = getStringFromConfig(configuration, "testDirPath"); cmdPrefix = getStringFromConfig(configuration, "cmdPrefix"); startTestExecution(); } catch (RabixTestException e) { logger.error("Error occuerred!", e); System.exit(-1); } } private static void startTestExecution() throws RabixTestException { boolean allTestsPassed = true; boolean testPassed = false; File dir = new File(testDirPath); File[] directoryListing = dir.listFiles(); ArrayList<Object> failedTests = new ArrayList<Object>(); if (!dir.isDirectory()) { logger.error("Problem with test directory path: Test directory path is not valid directory path."); System.exit(-1); } if (directoryListing == null) { logger.error("Problem with provided test directory: Test directory is empty."); } logger.info("Extracting jar file"); command("tar -zxvf " + System.getProperty("user.dir") + 
"/rabix-backend-local/target/rabix-backend-local-0.6.1-SNAPSHOT-id3.tar.gz", workingdir); command("cp -a " + System.getProperty("user.dir") + "/rabix-integration-testing/testbacklog .", workingdir); for (File child : directoryListing) { if (!child.getPath().endsWith(".test.yaml")) continue; try { String currentTest = readFile(child.getAbsolutePath(), Charset.defaultCharset()); Map<String, Object> inputSuite = JSONHelper.readMap(JSONHelper.transformToJSON(currentTest)); Iterator entries = inputSuite.entrySet().iterator(); while (entries.hasNext()) { Entry thisEntry = (Entry) entries.next(); Object testName = thisEntry.getKey(); Object test = thisEntry.getValue(); logger.info("Running test: " + testName + " with given parameters:"); @SuppressWarnings({ "rawtypes", "unchecked" }) Map<String, Map<String, LinkedHashMap>> mapTest = (Map<String, Map<String, LinkedHashMap>>) test; logger.info(" app: " + mapTest.get("app")); logger.info(" inputs: " + mapTest.get("inputs")); logger.info(" expected: " + mapTest.get("expected")); String cmd = cmdPrefix + " " + mapTest.get("app") + " " + mapTest.get("inputs") + " > result.yaml"; logger.info("->Running cmd: " + cmd); command(cmd, workingdir); File resultFile = new File(resultPath); String resultText = readFile(resultFile.getAbsolutePath(), Charset.defaultCharset()); Map<String, Object> resultData = JSONHelper.readMap(JSONHelper.transformToJSON(resultText)); logger.info("\nGenerated result file:"); logger.info(resultText); testPassed = validateTestCase(mapTest, resultData); logger.info("Test result: "); if (testPassed) { logger.info(testName + " PASSED"); } else { logger.info(testName + " FAILED"); failedTests.add(testName); allTestsPassed = false; } } } catch (IOException e) { logger.error("Test suite execution failed. 
", e); System.exit(-1); } } if (allTestsPassed) { logger.info("Test suite passed successfully."); } else { logger.info("Test suite failed."); logger.info("Failed test number: " + failedTests.size()); logger.info("Failed tests:"); for (Object test : failedTests) { logger.info(test.toString()); } } } private static boolean validateTestCase(Map<String, Map<String, LinkedHashMap>> mapTest, Map<String, Object> resultData) { String resultFileName; int resultFileSize; String resultFileClass; Map<String, Object> resultValues = ((Map<String, Object>) resultData.get("outfile")); resultFileName = resultValues.get("path").toString(); resultFileName = resultFileName.split("/")[resultFileName.split("/").length - 1]; resultFileSize = (int) resultValues.get("size"); resultFileClass = resultValues.get("class").toString(); logger.info("Test validation:"); logger.info("result file name: " + resultFileName + ", expected file name: " + mapTest.get("expected").get("outfile").get("name")); logger.info("result file size: " + resultFileSize + ", expected file size: " + mapTest.get("expected").get("outfile").get("size")); logger.info("result file class: " + resultFileClass + ", expected file class: " + mapTest.get("expected").get("outfile").get("class")); boolean fileNamesEqual = resultFileName.equals(mapTest.get("expected").get("outfile").get("name")); boolean fileSizesEqual = resultFileSize == (int) mapTest.get("expected").get("outfile").get("size"); boolean fileClassesEqual = resultFileClass.equals(mapTest.get("expected").get("outfile").get("class")); if (!fileNamesEqual) { logger.error("result and expected file name are not equal!"); } else { if (!fileSizesEqual) { logger.error("result and expected file size are not equal!"); } else { if (!fileClassesEqual) { logger.error("result and expected file class are not equal!"); } else { logger.info("Test case passed."); return true; } } } return false; } public static void command(final String cmdline, final String directory) throws 
RabixTestException { try { Process process = new ProcessBuilder(new String[] { "bash", "-c", cmdline }).redirectErrorStream(true) .directory(new File(directory)).start(); BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream())); String line = null; while ((line = br.readLine()) != null) logger.info(line); int exitCode = process.waitFor(); if (0 != exitCode) { throw new RabixTestException("Error while executing command: Non zero exit code " + exitCode); } } catch (Exception e) { logger.error("Error while executing command. ", e); throw new RabixTestException("Error while executing command: " + e.getMessage()); } } /** * Reads content from a file */ static String readFile(String path, Charset encoding) throws IOException { byte[] encoded = Files.readAllBytes(Paths.get(path)); return new String(encoded, encoding); } @SuppressWarnings("unchecked") private static PropertiesConfiguration getConfig() throws RabixTestException { PropertiesConfiguration configuration = new PropertiesConfiguration(); String userDir = System.getProperty("user.dir"); if (userDir == null) { throw new RabixTestException("null value for user.dir property"); } File configDir = new File(userDir + "/rabix-integration-testing/config/test"); try { Iterator<File> iterator = FileUtils.iterateFiles(configDir, new String[] { "properties" }, true); while (iterator.hasNext()) { File configFile = iterator.next(); configuration.load(configFile); } return configuration; } catch (ConfigurationException e) { logger.error("Failed to load configuration properties", e); throw new RabixTestException("Failed to load configuration properties"); } } private static String getStringFromConfig(PropertiesConfiguration configuration, String key) { return configuration.getString(key); } }
Modification to ProcessBuilder in order to print stderr
rabix-integration-testing/src/main/java/org/rabix/tests/TestRunner.java
Modification to ProcessBuilder in order to print stderr
Java
apache-2.0
f882ee3b5cde4e0c3804668d110ccca08533b402
0
tripodsan/jackrabbit,Kast0rTr0y/jackrabbit,bartosz-grabski/jackrabbit,tripodsan/jackrabbit,bartosz-grabski/jackrabbit,sdmcraft/jackrabbit,afilimonov/jackrabbit,SylvesterAbreu/jackrabbit,SylvesterAbreu/jackrabbit,bartosz-grabski/jackrabbit,Kast0rTr0y/jackrabbit,kigsmtua/jackrabbit,Overseas-Student-Living/jackrabbit,afilimonov/jackrabbit,Kast0rTr0y/jackrabbit,kigsmtua/jackrabbit,afilimonov/jackrabbit,SylvesterAbreu/jackrabbit,tripodsan/jackrabbit,Overseas-Student-Living/jackrabbit,kigsmtua/jackrabbit,sdmcraft/jackrabbit,sdmcraft/jackrabbit,Overseas-Student-Living/jackrabbit
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.core.persistence.bundle.util; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import javax.jcr.RepositoryException; import javax.naming.Context; import javax.naming.NamingException; import javax.sql.DataSource; /** * A factory for new database connections. * Supported are regular JDBC drivers, as well as * JNDI resources. */ public class ConnectionFactory { /** * Open a connection using the specified properties. * The connection can be created using a JNDI Data Source as well. To do that, * the driver class name must reference a javax.naming.Context class * (for example javax.naming.InitialContext), and the URL must be the JNDI URL * (for example java:comp/env/jdbc/Test). 
* * @param driver the JDBC driver or the Context class * @param url the database URL * @param user the user name * @param password the password * @return the connection * @throws RepositoryException if the driver could not be loaded * @throws SQLException if the connection could not be established */ public static Connection getConnection(String driver, String url, String user, String password) throws RepositoryException, SQLException { if (driver != null || driver.length() > 0) { try { Class d = Class.forName(driver); if (javax.naming.Context.class.isAssignableFrom(d)) { // JNDI context Context context = (Context) d.newInstance(); DataSource ds = (DataSource) context.lookup(url); if (isNullOrEmpty(user) && isNullOrEmpty(password)) { return ds.getConnection(); } else { return ds.getConnection(user, password); } } else { try { // Workaround for Apache Derby: // The JDBC specification recommends the Class.forName method without the .newInstance() method call, // but it is required after a Derby 'shutdown'. d.newInstance(); } catch (Throwable e) { // Ignore exceptions // There's no requirement that a JDBC driver class has a public default constructor } } } catch (ClassNotFoundException e) { throw new RepositoryException("Could not load class " + driver, e); } catch (InstantiationException e) { throw new RepositoryException("Could not instantiate context " + driver, e); } catch (IllegalAccessException e) { throw new RepositoryException("Could not instantiate context " + driver, e); } catch (NamingException e) { throw new RepositoryException("Naming exception using " + driver + " url: " + url, e); } } return DriverManager.getConnection(url, user, password); } /** * Check if a String is null or empty (the length is null). * * @return true if it is null or empty */ private static boolean isNullOrEmpty(String s) { return s == null || s.length() == 0; } }
jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/util/ConnectionFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.core.persistence.bundle.util; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import javax.jcr.RepositoryException; import javax.naming.Context; import javax.naming.NamingException; import javax.sql.DataSource; /** * A factory for new database connections. * Supported are regular JDBC drivers, as well as * JNDI resources. */ public class ConnectionFactory { /** * Open a connection using the specified properties. * The connection can be created using a JNDI Data Source as well. To do that, * the driver class name must reference a javax.naming.Context class * (for example javax.naming.InitialContext), and the URL must be the JNDI URL * (for example java:comp/env/jdbc/Test). 
* * @param driver the JDBC driver or the Context class * @param url the database URL * @param user the user name * @param password the password * @return the connection * @throws RepositoryException if the driver could not be loaded * @throws SQLException if the connection could not be established */ public static Connection getConnection(String driver, String url, String user, String password) throws RepositoryException, SQLException { if (driver != null || driver.length() > 0) { try { Class d = Class.forName(driver); if (javax.naming.Context.class.isAssignableFrom(d)) { // JNDI context Context context = (Context) d.newInstance(); DataSource ds = (DataSource) context.lookup(url); return ds.getConnection(user, password); } else { try { // Workaround for Apache Derby: // The JDBC specification recommends the Class.forName method without the .newInstance() method call, // but it is required after a Derby 'shutdown'. d.newInstance(); } catch (Throwable e) { // Ignore exceptions // There's no requirement that a JDBC driver class has a public default constructor } } } catch (ClassNotFoundException e) { throw new RepositoryException("Could not load class " + driver, e); } catch (InstantiationException e) { throw new RepositoryException("Could not instantiate context " + driver, e); } catch (IllegalAccessException e) { throw new RepositoryException("Could not instantiate context " + driver, e); } catch (NamingException e) { throw new RepositoryException("Naming exception using " + driver + " url: " + url, e); } } return DriverManager.getConnection(url, user, password); } }
JCR-1305 JNDI data sources with BundleDbPersistenceManager: UnsupportedOperationException git-svn-id: 02b679d096242155780e1604e997947d154ee04a@613799 13f79535-47bb-0310-9956-ffa450edef68
jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/bundle/util/ConnectionFactory.java
JCR-1305 JNDI data sources with BundleDbPersistenceManager: UnsupportedOperationException
Java
apache-2.0
c729469aed46349b927d4038e67ff4fb5fa8ebd2
0
ansell/commons-rdf
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.rdf.api; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Stream; import org.junit.Assume; import org.junit.Before; import org.junit.Test; /** * Test Graph implementation * <p> * To add to your implementation's tests, create a subclass with a name ending * in <code>Test</code> and provide {@link #createFactory()} which minimally * must support {@link RDF#createGraph()} and {@link RDF#createIRI(String)}, but * ideally support all operations. * <p> * This test uses try-with-resources blocks for calls to {@link Graph#stream()} * and {@link Graph#iterate()}. 
* * @see Graph * @see RDF */ public abstract class AbstractGraphTest { protected RDF factory; protected Graph graph; protected IRI alice; protected IRI bob; protected IRI name; protected IRI knows; protected IRI member; protected BlankNode bnode1; protected BlankNode bnode2; protected Literal aliceName; protected Literal bobName; protected Literal secretClubName; protected Literal companyName; protected Triple bobNameTriple; /** * * This method must be overridden by the implementing test to provide a * factory for the test to create {@link Graph}, {@link IRI} etc. * * @return {@link RDF} instance to be tested. */ protected abstract RDF createFactory(); @Before public void createGraphAndAdd() { factory = createFactory(); graph = factory.createGraph(); assertEquals(0, graph.size()); alice = factory.createIRI("http://example.com/alice"); bob = factory.createIRI("http://example.com/bob"); name = factory.createIRI("http://xmlns.com/foaf/0.1/name"); knows = factory.createIRI("http://xmlns.com/foaf/0.1/knows"); member = factory.createIRI("http://xmlns.com/foaf/0.1/member"); try { bnode1 = factory.createBlankNode("org1"); bnode2 = factory.createBlankNode("org2"); } catch (final UnsupportedOperationException ex) { // leave as null } try { secretClubName = factory.createLiteral("The Secret Club"); companyName = factory.createLiteral("A company"); aliceName = factory.createLiteral("Alice"); bobName = factory.createLiteral("Bob", "en-US"); } catch (final UnsupportedOperationException ex) { // leave as null } if (aliceName != null) { graph.add(alice, name, aliceName); } graph.add(alice, knows, bob); if (bnode1 != null) { graph.add(alice, member, bnode1); } if (bobName != null) { try { bobNameTriple = factory.createTriple(bob, name, bobName); } catch (final UnsupportedOperationException ex) { // leave as null } if (bobNameTriple != null) { graph.add(bobNameTriple); } } if (bnode1 != null) { graph.add(factory.createTriple(bob, member, bnode1)); graph.add(factory.createTriple(bob, 
member, bnode2)); if (secretClubName != null) { graph.add(bnode1, name, secretClubName); graph.add(bnode2, name, companyName); } } } @Test public void size() throws Exception { assertTrue(graph.size() > 0); Assume.assumeNotNull(bnode1, bnode2, aliceName, bobName, secretClubName, companyName, bobNameTriple); // Can only reliably predict size if we could create all triples assertEquals(8, graph.size()); } @Test public void iterate() throws Exception { Assume.assumeTrue(graph.size() > 0); final List<Triple> triples = new ArrayList<>(); for (final Triple t : graph.iterate()) { triples.add(t); } assertEquals(graph.size(), triples.size()); if (bobNameTriple != null) { assertTrue(triples.contains(bobNameTriple)); } // aborted iteration final Iterable<Triple> iterate = graph.iterate(); final Iterator<Triple> it = iterate.iterator(); assertTrue(it.hasNext()); it.next(); closeIterable(iterate); // second iteration - should start from fresh and // get the same count long count = 0; final Iterable<Triple> iterable = graph.iterate(); for (@SuppressWarnings("unused") final Triple t : iterable) { count++; } assertEquals(graph.size(), count); } /** * Special triple closing for RDF4J. */ private void closeIterable(final Iterable<Triple> iterate) throws Exception { if (iterate instanceof AutoCloseable) { ((AutoCloseable) iterate).close(); } } @Test public void iterateFilter() throws Exception { final List<RDFTerm> friends = new ArrayList<>(); final IRI alice = factory.createIRI("http://example.com/alice"); final IRI knows = factory.createIRI("http://xmlns.com/foaf/0.1/knows"); for (final Triple t : graph.iterate(alice, knows, null)) { friends.add(t.getObject()); } assertEquals(1, friends.size()); assertEquals(bob, friends.get(0)); // .. can we iterate over zero hits? 
final Iterable<Triple> iterate = graph.iterate(bob, knows, alice); for (final Triple unexpected : iterate) { fail("Unexpected triple " + unexpected); } // closeIterable(iterate); } @Test public void contains() throws Exception { assertFalse(graph.contains(bob, knows, alice)); // or so he claims.. assertTrue(graph.contains(alice, knows, bob)); try (Stream<? extends Triple> stream = graph.stream()) { final Optional<? extends Triple> first = stream.skip(4).findFirst(); Assume.assumeTrue(first.isPresent()); final Triple existingTriple = first.get(); assertTrue(graph.contains(existingTriple)); } final Triple nonExistingTriple = factory.createTriple(bob, knows, alice); assertFalse(graph.contains(nonExistingTriple)); Triple triple = null; try { triple = factory.createTriple(alice, knows, bob); } catch (final UnsupportedOperationException ex) { } if (triple != null) { // FIXME: Should not this always be true? // assertTrue(graph.contains(triple)); } } @Test public void remove() throws Exception { final long fullSize = graph.size(); graph.remove(alice, knows, bob); final long shrunkSize = graph.size(); assertEquals(1, fullSize - shrunkSize); graph.remove(alice, knows, bob); assertEquals(shrunkSize, graph.size()); // unchanged graph.add(alice, knows, bob); graph.add(alice, knows, bob); graph.add(alice, knows, bob); // Undetermined size at this point -- but at least it // should be bigger assertTrue(graph.size() > shrunkSize); // and after a single remove they should all be gone graph.remove(alice, knows, bob); assertEquals(shrunkSize, graph.size()); Triple otherTriple; try (Stream<? extends Triple> stream = graph.stream()) { final Optional<? extends Triple> anyTriple = stream.findAny(); Assume.assumeTrue(anyTriple.isPresent()); otherTriple = anyTriple.get(); } graph.remove(otherTriple); assertEquals(shrunkSize - 1, graph.size()); graph.remove(otherTriple); assertEquals(shrunkSize - 1, graph.size()); // no change // for some reason in rdf4j this causes duplicates! 
graph.add(otherTriple); // graph.stream().forEach(System.out::println); // should have increased assertTrue(graph.size() >= shrunkSize); } @Test public void clear() throws Exception { graph.clear(); assertFalse(graph.contains(alice, knows, bob)); assertEquals(0, graph.size()); graph.clear(); // no-op assertEquals(0, graph.size()); } @Test public void getTriples() throws Exception { long tripleCount; try (Stream<? extends Triple> stream = graph.stream()) { tripleCount = stream.count(); } assertTrue(tripleCount > 0); try (Stream<? extends Triple> stream = graph.stream()) { assertTrue(stream.allMatch(t -> graph.contains(t))); } // Check exact count Assume.assumeNotNull(bnode1, bnode2, aliceName, bobName, secretClubName, companyName, bobNameTriple); assertEquals(8, tripleCount); } @Test public void getTriplesQuery() throws Exception { try (Stream<? extends Triple> stream = graph.stream(alice, null, null)) { final long aliceCount = stream.count(); assertTrue(aliceCount > 0); Assume.assumeNotNull(aliceName); assertEquals(3, aliceCount); } Assume.assumeNotNull(bnode1, bnode2, bobName, companyName, secretClubName); try (Stream<? extends Triple> stream = graph.stream(null, name, null)) { assertEquals(4, stream.count()); } Assume.assumeNotNull(bnode1); try (Stream<? 
extends Triple> stream = graph.stream(null, member, null)) { assertEquals(3, stream.count()); } } @Test public void addBlankNodesFromMultipleGraphs() { try { // Create two separate Graph instances final Graph g1 = createGraph1(); final Graph g2 = createGraph2(); // and add them to a new Graph g3 final Graph g3 = factory.createGraph(); addAllTriples(g1, g3); addAllTriples(g2, g3); // Let's make a map to find all those blank nodes after insertion // (The Graph implementation is not currently required to // keep supporting those BlankNodes with contains() - see // COMMONSRDF-15) final Map<String, BlankNodeOrIRI> whoIsWho = new ConcurrentHashMap<>(); // ConcurrentHashMap as we will try parallel forEach below, // which should not give inconsistent results (it does with a // HashMap!) // look up BlankNodes by name final IRI name = factory.createIRI("http://xmlns.com/foaf/0.1/name"); try (Stream<? extends Triple> stream = g3.stream(null, name, null)) { stream.parallel().forEach(t -> whoIsWho.put(t.getObject().ntriplesString(), t.getSubject())); } assertEquals(4, whoIsWho.size()); // and contains 4 unique values assertEquals(4, new HashSet<>(whoIsWho.values()).size()); final BlankNodeOrIRI b1Alice = whoIsWho.get("\"Alice\""); assertNotNull(b1Alice); final BlankNodeOrIRI b2Bob = whoIsWho.get("\"Bob\""); assertNotNull(b2Bob); final BlankNodeOrIRI b1Charlie = whoIsWho.get("\"Charlie\""); assertNotNull(b1Charlie); final BlankNodeOrIRI b2Dave = whoIsWho.get("\"Dave\""); assertNotNull(b2Dave); // All blank nodes should differ notEquals(b1Alice, b2Bob); notEquals(b1Alice, b1Charlie); notEquals(b1Alice, b2Dave); notEquals(b2Bob, b1Charlie); notEquals(b2Bob, b2Dave); notEquals(b1Charlie, b2Dave); // And we should be able to query with them again // as we got them back from g3 final IRI hasChild = factory.createIRI("http://example.com/hasChild"); assertTrue(g3.contains(b1Alice, hasChild, b2Bob)); assertTrue(g3.contains(b2Dave, hasChild, b1Charlie)); // But not 
assertFalse(g3.contains(b1Alice, hasChild, b1Alice)); assertFalse(g3.contains(b1Alice, hasChild, b1Charlie)); assertFalse(g3.contains(b1Alice, hasChild, b2Dave)); // nor assertFalse(g3.contains(b2Dave, hasChild, b1Alice)); assertFalse(g3.contains(b2Dave, hasChild, b1Alice)); // and these don't have any children (as far as we know) assertFalse(g3.contains(b2Bob, hasChild, null)); assertFalse(g3.contains(b1Charlie, hasChild, null)); } catch (final UnsupportedOperationException ex) { Assume.assumeNoException(ex); } } @Test public void containsLanguageTagsCaseInsensitive() { // COMMONSRDF-51: Ensure we can add/contains/remove with any casing // of literal language tag final Literal lower = factory.createLiteral("Hello", "en-gb"); final Literal upper = factory.createLiteral("Hello", "EN-GB"); final Literal mixed = factory.createLiteral("Hello", "en-GB"); final IRI example1 = factory.createIRI("http://example.com/s1"); final IRI greeting = factory.createIRI("http://example.com/greeting"); final Graph graph = factory.createGraph(); graph.add(example1, greeting, upper); assertTrue(graph.contains(factory.createTriple(example1, greeting, upper))); assertTrue(graph.contains(factory.createTriple(example1, greeting, lower))); assertTrue(graph.contains(factory.createTriple(example1, greeting, mixed))); // or as patterns assertTrue(graph.contains(null, null, upper)); assertTrue(graph.contains(null, null, lower)); assertTrue(graph.contains(null, null, mixed)); // Remove should also honour any case graph.remove(example1, null, mixed); // no more greetings of any kind assertFalse(graph.contains(null, greeting, null)); } @Test public void containsLanguageTagsCaseInsensitiveTurkish() { // COMMONSRDF-51: Special test for Turkish issue where // "i".toLowerCase() != "i" // See also: // https://garygregory.wordpress.com/2015/11/03/java-lowercase-conversion-turkey/ // This is similar to the test in AbstractRDFTest, but on a graph Locale defaultLocale = Locale.getDefault(); try { 
Locale.setDefault(Locale.ROOT); final Literal lowerROOT = factory.createLiteral("moi", "fi"); final Literal upperROOT = factory.createLiteral("moi", "FI"); final Literal mixedROOT = factory.createLiteral("moi", "fI"); final Graph g = factory.createGraph(); final IRI exampleROOT = factory.createIRI("http://example.com/s1"); final IRI greeting = factory.createIRI("http://example.com/greeting"); g.add(exampleROOT, greeting, mixedROOT); Locale turkish = Locale.forLanguageTag("TR"); Locale.setDefault(turkish); // If the below assertion fails, then the Turkish // locale no longer have this peculiarity that // we want to test. Assume.assumeFalse("FI".toLowerCase().equals("fi")); // Below is pretty much the same as in // containsLanguageTagsCaseInsensitive() final Literal lower = factory.createLiteral("moi", "fi"); final Literal upper = factory.createLiteral("moi", "FI"); final Literal mixed = factory.createLiteral("moi", "fI"); final IRI exampleTR = factory.createIRI("http://example.com/s2"); g.add(exampleTR, greeting, upper); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, upper))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, upperROOT))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, lower))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, lowerROOT))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, mixed))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, mixedROOT))); assertTrue(g.contains(exampleTR, null, upper)); assertTrue(g.contains(exampleTR, null, upperROOT)); assertTrue(g.contains(exampleTR, null, lower)); assertTrue(g.contains(exampleTR, null, lowerROOT)); assertTrue(g.contains(exampleTR, null, mixed)); assertTrue(g.contains(exampleTR, null, mixedROOT)); g.remove(exampleTR, null, mixed); // No more greetings for exampleTR assertFalse(g.contains(exampleTR, null, null)); // What about the triple we added while in ROOT locale? 
assertTrue(g.contains(factory.createTriple(exampleROOT, greeting, upper))); assertTrue(g.contains(factory.createTriple(exampleROOT, greeting, lower))); assertTrue(g.contains(factory.createTriple(exampleROOT, greeting, mixed))); assertTrue(g.contains(exampleROOT, null, upper)); assertTrue(g.contains(exampleROOT, null, lower)); assertTrue(g.contains(exampleROOT, null, mixed)); g.remove(exampleROOT, null, mixed); // No more greetings of any kind assertFalse(g.contains(null, null, null)); } finally { Locale.setDefault(defaultLocale); } } private void notEquals(final BlankNodeOrIRI node1, final BlankNodeOrIRI node2) { assertFalse(node1.equals(node2)); // in which case we should be able to assume // (as they are in the same graph) assertFalse(node1.ntriplesString().equals(node2.ntriplesString())); } /** * Add all triples from the source to the target. * <p> * The triples may be copied in any order. No special conversion or * adaptation of {@link BlankNode}s are performed. * * @param source * Source Graph to copy triples from * @param target * Target Graph where triples will be added */ private void addAllTriples(final Graph source, final Graph target) { // unordered() as we don't need to preserve triple order // sequential() as we don't (currently) require target Graph to be // thread-safe try (Stream<? 
extends Triple> stream = source.stream()) { stream.unordered().sequential().forEach(t -> target.add(t)); } } /** * Make a new graph with two BlankNodes - each with a different * uniqueReference */ private Graph createGraph1() { final RDF factory1 = createFactory(); final IRI name = factory1.createIRI("http://xmlns.com/foaf/0.1/name"); final Graph g1 = factory1.createGraph(); final BlankNode b1 = createOwnBlankNode("b1", "0240eaaa-d33e-4fc0-a4f1-169d6ced3680"); g1.add(b1, name, factory1.createLiteral("Alice")); final BlankNode b2 = createOwnBlankNode("b2", "9de7db45-0ce7-4b0f-a1ce-c9680ffcfd9f"); g1.add(b2, name, factory1.createLiteral("Bob")); final IRI hasChild = factory1.createIRI("http://example.com/hasChild"); g1.add(b1, hasChild, b2); return g1; } /** * Create a different implementation of BlankNode to be tested with * graph.add(a,b,c); (the implementation may or may not then choose to * translate such to its own instances) * * @param name * @return */ private BlankNode createOwnBlankNode(final String name, final String uuid) { return new BlankNode() { @Override public String ntriplesString() { return "_: " + name; } @Override public String uniqueReference() { return uuid; } @Override public int hashCode() { return uuid.hashCode(); } @Override public boolean equals(final Object obj) { if (!(obj instanceof BlankNode)) { return false; } final BlankNode other = (BlankNode) obj; return uuid.equals(other.uniqueReference()); } }; } private Graph createGraph2() { final RDF factory2 = createFactory(); final IRI name = factory2.createIRI("http://xmlns.com/foaf/0.1/name"); final Graph g2 = factory2.createGraph(); final BlankNode b1 = createOwnBlankNode("b1", "bc8d3e45-a08f-421d-85b3-c25b373abf87"); g2.add(b1, name, factory2.createLiteral("Charlie")); final BlankNode b2 = createOwnBlankNode("b2", "2209097a-5078-4b03-801a-6a2d2f50d739"); g2.add(b2, name, factory2.createLiteral("Dave")); final IRI hasChild = factory2.createIRI("http://example.com/hasChild"); // NOTE: 
Opposite direction of loadGraph1 g2.add(b2, hasChild, b1); return g2; } /** * An attempt to use the Java 8 streams to look up a more complicated query. * <p> * FYI, the equivalent SPARQL version (untested): * * <pre> * SELECT ?orgName WHERE { * ?org foaf:name ?orgName . * ?alice foaf:member ?org . * ?bob foaf:member ?org . * ?alice foaf:knows ?bob . * FILTER NOT EXIST { ?bob foaf:knows ?alice } * } * </pre> * * @throws Exception If test fails */ @Test public void whyJavaStreamsMightNotTakeOverFromSparql() throws Exception { Assume.assumeNotNull(bnode1, bnode2, secretClubName); // Find a secret organizations try (Stream<? extends Triple> stream = graph.stream(null, knows, null)) { assertEquals("\"The Secret Club\"", // Find One-way "knows" stream.filter(t -> !graph.contains((BlankNodeOrIRI) t.getObject(), knows, t.getSubject())) .map(knowsTriple -> { try (Stream<? extends Triple> memberOf = graph // and those they know, what are they // member of? .stream((BlankNodeOrIRI) knowsTriple.getObject(), member, null)) { return memberOf // keep those which first-guy is a // member of .filter(memberTriple -> graph.contains(knowsTriple.getSubject(), member, // First hit is good enough memberTriple.getObject())) .findFirst().get().getObject(); } }) // then look up the name of that org .map(org -> { try (Stream<? extends Triple> orgName = graph.stream((BlankNodeOrIRI) org, name, null)) { return orgName.findFirst().get().getObject().ntriplesString(); } }).findFirst().get()); } } }
api/src/test/java/org/apache/commons/rdf/api/AbstractGraphTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.rdf.api; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Stream; import org.junit.Assume; import org.junit.Before; import org.junit.Test; /** * Test Graph implementation * <p> * To add to your implementation's tests, create a subclass with a name ending * in <code>Test</code> and provide {@link #createFactory()} which minimally * must support {@link RDF#createGraph()} and {@link RDF#createIRI(String)}, but * ideally support all operations. * <p> * This test uses try-with-resources blocks for calls to {@link Graph#stream()} * and {@link Graph#iterate()}. 
* * @see Graph * @see RDF */ public abstract class AbstractGraphTest { protected RDF factory; protected Graph graph; protected IRI alice; protected IRI bob; protected IRI name; protected IRI knows; protected IRI member; protected BlankNode bnode1; protected BlankNode bnode2; protected Literal aliceName; protected Literal bobName; protected Literal secretClubName; protected Literal companyName; protected Triple bobNameTriple; /** * * This method must be overridden by the implementing test to provide a * factory for the test to create {@link Graph}, {@link IRI} etc. * * @return {@link RDF} instance to be tested. */ protected abstract RDF createFactory(); @Before public void createGraphAndAdd() { factory = createFactory(); graph = factory.createGraph(); assertEquals(0, graph.size()); alice = factory.createIRI("http://example.com/alice"); bob = factory.createIRI("http://example.com/bob"); name = factory.createIRI("http://xmlns.com/foaf/0.1/name"); knows = factory.createIRI("http://xmlns.com/foaf/0.1/knows"); member = factory.createIRI("http://xmlns.com/foaf/0.1/member"); try { bnode1 = factory.createBlankNode("org1"); bnode2 = factory.createBlankNode("org2"); } catch (final UnsupportedOperationException ex) { // leave as null } try { secretClubName = factory.createLiteral("The Secret Club"); companyName = factory.createLiteral("A company"); aliceName = factory.createLiteral("Alice"); bobName = factory.createLiteral("Bob", "en-US"); } catch (final UnsupportedOperationException ex) { // leave as null } if (aliceName != null) { graph.add(alice, name, aliceName); } graph.add(alice, knows, bob); if (bnode1 != null) { graph.add(alice, member, bnode1); } if (bobName != null) { try { bobNameTriple = factory.createTriple(bob, name, bobName); } catch (final UnsupportedOperationException ex) { // leave as null } if (bobNameTriple != null) { graph.add(bobNameTriple); } } if (bnode1 != null) { graph.add(factory.createTriple(bob, member, bnode1)); graph.add(factory.createTriple(bob, 
member, bnode2)); if (secretClubName != null) { graph.add(bnode1, name, secretClubName); graph.add(bnode2, name, companyName); } } } @Test public void size() throws Exception { assertTrue(graph.size() > 0); Assume.assumeNotNull(bnode1, bnode2, aliceName, bobName, secretClubName, companyName, bobNameTriple); // Can only reliably predict size if we could create all triples assertEquals(8, graph.size()); } @Test public void iterate() throws Exception { Assume.assumeTrue(graph.size() > 0); final List<Triple> triples = new ArrayList<>(); for (final Triple t : graph.iterate()) { triples.add(t); } assertEquals(graph.size(), triples.size()); if (bobNameTriple != null) { assertTrue(triples.contains(bobNameTriple)); } // aborted iteration final Iterable<Triple> iterate = graph.iterate(); final Iterator<Triple> it = iterate.iterator(); assertTrue(it.hasNext()); it.next(); closeIterable(iterate); // second iteration - should start from fresh and // get the same count long count = 0; final Iterable<Triple> iterable = graph.iterate(); for (@SuppressWarnings("unused") final Triple t : iterable) { count++; } assertEquals(graph.size(), count); } /** * Special triple closing for RDF4J. */ private void closeIterable(final Iterable<Triple> iterate) throws Exception { if (iterate instanceof AutoCloseable) { ((AutoCloseable) iterate).close(); } } @Test public void iterateFilter() throws Exception { final List<RDFTerm> friends = new ArrayList<>(); final IRI alice = factory.createIRI("http://example.com/alice"); final IRI knows = factory.createIRI("http://xmlns.com/foaf/0.1/knows"); for (final Triple t : graph.iterate(alice, knows, null)) { friends.add(t.getObject()); } assertEquals(1, friends.size()); assertEquals(bob, friends.get(0)); // .. can we iterate over zero hits? 
final Iterable<Triple> iterate = graph.iterate(bob, knows, alice); for (final Triple unexpected : iterate) { fail("Unexpected triple " + unexpected); } // closeIterable(iterate); } @Test public void contains() throws Exception { assertFalse(graph.contains(bob, knows, alice)); // or so he claims.. assertTrue(graph.contains(alice, knows, bob)); try (Stream<? extends Triple> stream = graph.stream()) { final Optional<? extends Triple> first = stream.skip(4).findFirst(); Assume.assumeTrue(first.isPresent()); final Triple existingTriple = first.get(); assertTrue(graph.contains(existingTriple)); } final Triple nonExistingTriple = factory.createTriple(bob, knows, alice); assertFalse(graph.contains(nonExistingTriple)); Triple triple = null; try { triple = factory.createTriple(alice, knows, bob); } catch (final UnsupportedOperationException ex) { } if (triple != null) { // FIXME: Should not this always be true? // assertTrue(graph.contains(triple)); } } @Test public void remove() throws Exception { final long fullSize = graph.size(); graph.remove(alice, knows, bob); final long shrunkSize = graph.size(); assertEquals(1, fullSize - shrunkSize); graph.remove(alice, knows, bob); assertEquals(shrunkSize, graph.size()); // unchanged graph.add(alice, knows, bob); graph.add(alice, knows, bob); graph.add(alice, knows, bob); // Undetermined size at this point -- but at least it // should be bigger assertTrue(graph.size() > shrunkSize); // and after a single remove they should all be gone graph.remove(alice, knows, bob); assertEquals(shrunkSize, graph.size()); Triple otherTriple; try (Stream<? extends Triple> stream = graph.stream()) { final Optional<? extends Triple> anyTriple = stream.findAny(); Assume.assumeTrue(anyTriple.isPresent()); otherTriple = anyTriple.get(); } graph.remove(otherTriple); assertEquals(shrunkSize - 1, graph.size()); graph.remove(otherTriple); assertEquals(shrunkSize - 1, graph.size()); // no change // for some reason in rdf4j this causes duplicates! 
graph.add(otherTriple); // graph.stream().forEach(System.out::println); // should have increased assertTrue(graph.size() >= shrunkSize); } @Test public void clear() throws Exception { graph.clear(); assertFalse(graph.contains(alice, knows, bob)); assertEquals(0, graph.size()); graph.clear(); // no-op assertEquals(0, graph.size()); } @Test public void getTriples() throws Exception { long tripleCount; try (Stream<? extends Triple> stream = graph.stream()) { tripleCount = stream.count(); } assertTrue(tripleCount > 0); try (Stream<? extends Triple> stream = graph.stream()) { assertTrue(stream.allMatch(t -> graph.contains(t))); } // Check exact count Assume.assumeNotNull(bnode1, bnode2, aliceName, bobName, secretClubName, companyName, bobNameTriple); assertEquals(8, tripleCount); } @Test public void getTriplesQuery() throws Exception { try (Stream<? extends Triple> stream = graph.stream(alice, null, null)) { final long aliceCount = stream.count(); assertTrue(aliceCount > 0); Assume.assumeNotNull(aliceName); assertEquals(3, aliceCount); } Assume.assumeNotNull(bnode1, bnode2, bobName, companyName, secretClubName); try (Stream<? extends Triple> stream = graph.stream(null, name, null)) { assertEquals(4, stream.count()); } Assume.assumeNotNull(bnode1); try (Stream<? 
extends Triple> stream = graph.stream(null, member, null)) { assertEquals(3, stream.count()); } } @Test public void addBlankNodesFromMultipleGraphs() { try { // Create two separate Graph instances final Graph g1 = createGraph1(); final Graph g2 = createGraph2(); // and add them to a new Graph g3 final Graph g3 = factory.createGraph(); addAllTriples(g1, g3); addAllTriples(g2, g3); // Let's make a map to find all those blank nodes after insertion // (The Graph implementation is not currently required to // keep supporting those BlankNodes with contains() - see // COMMONSRDF-15) final Map<String, BlankNodeOrIRI> whoIsWho = new ConcurrentHashMap<>(); // ConcurrentHashMap as we will try parallel forEach below, // which should not give inconsistent results (it does with a // HashMap!) // look up BlankNodes by name final IRI name = factory.createIRI("http://xmlns.com/foaf/0.1/name"); try (Stream<? extends Triple> stream = g3.stream(null, name, null)) { stream.parallel().forEach(t -> whoIsWho.put(t.getObject().ntriplesString(), t.getSubject())); } assertEquals(4, whoIsWho.size()); // and contains 4 unique values assertEquals(4, new HashSet<>(whoIsWho.values()).size()); final BlankNodeOrIRI b1Alice = whoIsWho.get("\"Alice\""); assertNotNull(b1Alice); final BlankNodeOrIRI b2Bob = whoIsWho.get("\"Bob\""); assertNotNull(b2Bob); final BlankNodeOrIRI b1Charlie = whoIsWho.get("\"Charlie\""); assertNotNull(b1Charlie); final BlankNodeOrIRI b2Dave = whoIsWho.get("\"Dave\""); assertNotNull(b2Dave); // All blank nodes should differ notEquals(b1Alice, b2Bob); notEquals(b1Alice, b1Charlie); notEquals(b1Alice, b2Dave); notEquals(b2Bob, b1Charlie); notEquals(b2Bob, b2Dave); notEquals(b1Charlie, b2Dave); // And we should be able to query with them again // as we got them back from g3 final IRI hasChild = factory.createIRI("http://example.com/hasChild"); assertTrue(g3.contains(b1Alice, hasChild, b2Bob)); assertTrue(g3.contains(b2Dave, hasChild, b1Charlie)); // But not 
assertFalse(g3.contains(b1Alice, hasChild, b1Alice)); assertFalse(g3.contains(b1Alice, hasChild, b1Charlie)); assertFalse(g3.contains(b1Alice, hasChild, b2Dave)); // nor assertFalse(g3.contains(b2Dave, hasChild, b1Alice)); assertFalse(g3.contains(b2Dave, hasChild, b1Alice)); // and these don't have any children (as far as we know) assertFalse(g3.contains(b2Bob, hasChild, null)); assertFalse(g3.contains(b1Charlie, hasChild, null)); } catch (final UnsupportedOperationException ex) { Assume.assumeNoException(ex); } } @Test public void containsLanguageTagsCaseInsensitive() { // COMMONSRDF-51: Ensure we can add/contains/remove with any casing // of literal language tag final Literal lower = factory.createLiteral("Hello", "en-gb"); final Literal upper = factory.createLiteral("Hello", "EN-GB"); final Literal mixed = factory.createLiteral("Hello", "en-GB"); final IRI example1 = factory.createIRI("http://example.com/s1"); final IRI greeting = factory.createIRI("http://example.com/greeting"); final Graph graph = factory.createGraph(); graph.add(example1, greeting, upper); assertTrue(graph.contains(factory.createTriple(example1, greeting, upper))); assertTrue(graph.contains(factory.createTriple(example1, greeting, lower))); assertTrue(graph.contains(factory.createTriple(example1, greeting, mixed))); // or as patterns assertTrue(graph.contains(null, null, upper)); assertTrue(graph.contains(null, null, lower)); assertTrue(graph.contains(null, null, mixed)); // Remove should also honour any case graph.remove(example1, greeting, mixed); assertFalse(graph.contains(null, greeting, null)); } @Test public void containsLanguageTagsCaseInsensitiveTurkish() { // COMMONSRDF-51: Special test for Turkish issue where // "i".toLowerCase() != "i" // See also: // https://garygregory.wordpress.com/2015/11/03/java-lowercase-conversion-turkey/ // This is similar to the test in AbstractRDFTest, but on a graph Locale defaultLocale = Locale.getDefault(); try { Locale.setDefault(Locale.ROOT); final 
Literal lowerROOT = factory.createLiteral("moi", "fi"); final Literal upperROOT = factory.createLiteral("moi", "FI"); final Literal mixedROOT = factory.createLiteral("moi", "fI"); final Graph g = factory.createGraph(); final IRI exampleROOT = factory.createIRI("http://example.com/s1"); final IRI greeting = factory.createIRI("http://example.com/greeting"); g.add(exampleROOT, greeting, mixedROOT); Locale turkish = Locale.forLanguageTag("TR"); Locale.setDefault(turkish); // If the below assertion fails, then the Turkish // locale no longer have this peculiarity that // we want to test. Assume.assumeFalse("FI".toLowerCase().equals("fi")); // Below is pretty much the same as in // containsLanguageTagsCaseInsensitive() final Literal lower = factory.createLiteral("moi", "fi"); final Literal upper = factory.createLiteral("moi", "FI"); final Literal mixed = factory.createLiteral("moi", "fI"); final IRI exampleTR = factory.createIRI("http://example.com/s2"); g.add(exampleTR, greeting, upper); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, upper))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, upperROOT))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, lower))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, lowerROOT))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, mixed))); assertTrue(g.contains(factory.createTriple(exampleTR, greeting, mixedROOT))); assertTrue(g.contains(exampleTR, null, upper)); assertTrue(g.contains(exampleTR, null, upperROOT)); assertTrue(g.contains(exampleTR, null, lower)); assertTrue(g.contains(exampleTR, null, lowerROOT)); assertTrue(g.contains(exampleTR, null, mixed)); assertTrue(g.contains(exampleTR, null, mixedROOT)); g.remove(exampleTR, greeting, mixed); assertFalse(g.contains(exampleTR, null, null)); // What about the triple we added while in ROOT locale? 
assertTrue(g.contains(factory.createTriple(exampleROOT, greeting, upper))); assertTrue(g.contains(factory.createTriple(exampleROOT, greeting, lower))); assertTrue(g.contains(factory.createTriple(exampleROOT, greeting, mixed))); assertTrue(g.contains(exampleROOT, null, upper)); assertTrue(g.contains(exampleROOT, null, lower)); assertTrue(g.contains(exampleROOT, null, mixed)); g.remove(exampleROOT, greeting, mixed); assertFalse(g.contains(exampleROOT, null, null)); } finally { Locale.setDefault(defaultLocale); } } private void notEquals(final BlankNodeOrIRI node1, final BlankNodeOrIRI node2) { assertFalse(node1.equals(node2)); // in which case we should be able to assume // (as they are in the same graph) assertFalse(node1.ntriplesString().equals(node2.ntriplesString())); } /** * Add all triples from the source to the target. * <p> * The triples may be copied in any order. No special conversion or * adaptation of {@link BlankNode}s are performed. * * @param source * Source Graph to copy triples from * @param target * Target Graph where triples will be added */ private void addAllTriples(final Graph source, final Graph target) { // unordered() as we don't need to preserve triple order // sequential() as we don't (currently) require target Graph to be // thread-safe try (Stream<? 
extends Triple> stream = source.stream()) { stream.unordered().sequential().forEach(t -> target.add(t)); } } /** * Make a new graph with two BlankNodes - each with a different * uniqueReference */ private Graph createGraph1() { final RDF factory1 = createFactory(); final IRI name = factory1.createIRI("http://xmlns.com/foaf/0.1/name"); final Graph g1 = factory1.createGraph(); final BlankNode b1 = createOwnBlankNode("b1", "0240eaaa-d33e-4fc0-a4f1-169d6ced3680"); g1.add(b1, name, factory1.createLiteral("Alice")); final BlankNode b2 = createOwnBlankNode("b2", "9de7db45-0ce7-4b0f-a1ce-c9680ffcfd9f"); g1.add(b2, name, factory1.createLiteral("Bob")); final IRI hasChild = factory1.createIRI("http://example.com/hasChild"); g1.add(b1, hasChild, b2); return g1; } /** * Create a different implementation of BlankNode to be tested with * graph.add(a,b,c); (the implementation may or may not then choose to * translate such to its own instances) * * @param name * @return */ private BlankNode createOwnBlankNode(final String name, final String uuid) { return new BlankNode() { @Override public String ntriplesString() { return "_: " + name; } @Override public String uniqueReference() { return uuid; } @Override public int hashCode() { return uuid.hashCode(); } @Override public boolean equals(final Object obj) { if (!(obj instanceof BlankNode)) { return false; } final BlankNode other = (BlankNode) obj; return uuid.equals(other.uniqueReference()); } }; } private Graph createGraph2() { final RDF factory2 = createFactory(); final IRI name = factory2.createIRI("http://xmlns.com/foaf/0.1/name"); final Graph g2 = factory2.createGraph(); final BlankNode b1 = createOwnBlankNode("b1", "bc8d3e45-a08f-421d-85b3-c25b373abf87"); g2.add(b1, name, factory2.createLiteral("Charlie")); final BlankNode b2 = createOwnBlankNode("b2", "2209097a-5078-4b03-801a-6a2d2f50d739"); g2.add(b2, name, factory2.createLiteral("Dave")); final IRI hasChild = factory2.createIRI("http://example.com/hasChild"); // NOTE: 
Opposite direction of loadGraph1 g2.add(b2, hasChild, b1); return g2; } /** * An attempt to use the Java 8 streams to look up a more complicated query. * <p> * FYI, the equivalent SPARQL version (untested): * * <pre> * SELECT ?orgName WHERE { * ?org foaf:name ?orgName . * ?alice foaf:member ?org . * ?bob foaf:member ?org . * ?alice foaf:knows ?bob . * FILTER NOT EXIST { ?bob foaf:knows ?alice } * } * </pre> * * @throws Exception If test fails */ @Test public void whyJavaStreamsMightNotTakeOverFromSparql() throws Exception { Assume.assumeNotNull(bnode1, bnode2, secretClubName); // Find a secret organizations try (Stream<? extends Triple> stream = graph.stream(null, knows, null)) { assertEquals("\"The Secret Club\"", // Find One-way "knows" stream.filter(t -> !graph.contains((BlankNodeOrIRI) t.getObject(), knows, t.getSubject())) .map(knowsTriple -> { try (Stream<? extends Triple> memberOf = graph // and those they know, what are they // member of? .stream((BlankNodeOrIRI) knowsTriple.getObject(), member, null)) { return memberOf // keep those which first-guy is a // member of .filter(memberTriple -> graph.contains(knowsTriple.getSubject(), member, // First hit is good enough memberTriple.getObject())) .findFirst().get().getObject(); } }) // then look up the name of that org .map(org -> { try (Stream<? extends Triple> orgName = graph.stream((BlankNodeOrIRI) org, name, null)) { return orgName.findFirst().get().getObject().ntriplesString(); } }).findFirst().get()); } } }
COMMONSRDF-51: Use patterns for remove/contains
api/src/test/java/org/apache/commons/rdf/api/AbstractGraphTest.java
COMMONSRDF-51: Use patterns for remove/contains
Java
apache-2.0
8e7469c931e14af010ee9beedd55b835884e0915
0
eatnoodles/LineBotCC
package com.cc.service.impl; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.ExecutionException; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.cc.Application; import com.cc.bean.CommandBean; import com.cc.bean.IrolCommandBean; import com.cc.bean.OtherCommandBean; import com.cc.bean.WoWCommandBean; import com.cc.dao.UserImgFuncDao; import com.cc.dao.UserTalkLevelDao; import com.cc.dao.WoWCharacterMappingDao; import com.cc.entity.UserImgFunc; import com.cc.entity.UserTalkLevel; import com.cc.entity.WoWCharacterMapping; import com.cc.entity.key.UserTalkLevelKey; import com.cc.enums.OtherEventEnum; import com.cc.service.IIrolService; import com.cc.service.INudoCCService; import com.cc.service.IWoWService; import com.cc.wow.boss.BossMaster; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.linecorp.bot.client.LineMessagingClient; import com.linecorp.bot.client.LineMessagingClientImpl; import com.linecorp.bot.client.LineMessagingService; import com.linecorp.bot.client.LineMessagingServiceBuilder; import com.linecorp.bot.model.event.MessageEvent; import com.linecorp.bot.model.event.message.TextMessageContent; import com.linecorp.bot.model.message.ImageMessage; import com.linecorp.bot.model.message.Message; import com.linecorp.bot.model.message.StickerMessage; import com.linecorp.bot.model.message.TextMessage; import com.linecorp.bot.model.profile.UserProfileResponse; import com.utils.NudoCCUtil; /** * @author Caleb.Cheng * */ @Component public class NudoCCServiceImpl implements 
INudoCCService { private static BossMaster wowBossMaster; private static final Logger LOG = LoggerFactory.getLogger(NudoCCServiceImpl.class); private LineMessagingClient lineMessagingClient; { LineMessagingService lineMessagingService = LineMessagingServiceBuilder.create(System.getenv("LINE_BOT_CHANNEL_TOKEN")).build(); lineMessagingClient = new LineMessagingClientImpl(lineMessagingService); } static { ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); try { wowBossMaster = mapper.readValue(Application.class.getResourceAsStream("/wowBoss.json"), new TypeReference<BossMaster>(){}); } catch (IOException e) { e.printStackTrace(); } } @Autowired private IWoWService wowService; @Autowired private WoWCharacterMappingDao wowCharacterMappingDao; @Autowired private IIrolService irolService; @Autowired private UserTalkLevelDao userTalkLevelDao; @Autowired private UserImgFuncDao userImgFuncDao; /** * find line sticker message * * @param packageId * @param stickerId * @return */ @Override public Message findStickerMessage(String packageId, String stickerId) { return new StickerMessage(packageId, stickerId); } /** * get line display name * * @param lineId * @return * @throws InterruptedException * @throws ExecutionException */ @Override public String getDisplayName(String lineId) throws InterruptedException, ExecutionException { UserProfileResponse userProfileResponse = lineMessagingClient.getProfile(lineId).get(); return userProfileResponse.getDisplayName(); } /** * generator command bean * * @param event * @return */ @Override public CommandBean genCommandBean(String command, String senderId, String userId) { if (StringUtils.isBlank(command)) { return null; } if (command.startsWith(NudoCCUtil.WOW_COMMAND)) { return wowService.genWoWCommandBean(command, senderId, userId); } if (irolService.isIrolCommand(command)) { return irolService.genIrolCommandBean(command, senderId, userId); } return 
this.genOtherCommandBean(command, senderId, userId); } /** * * @param command * @param senderId * @param userId * @return */ private CommandBean genOtherCommandBean(String command, String senderId, String userId) { OtherCommandBean bean = new OtherCommandBean(command, senderId, userId); Pattern pattern = Pattern.compile(NudoCCUtil.WCL_USER_COMMANDS); //other command if (command.toLowerCase().startsWith(NudoCCUtil.ROLL_COMMAND)) { bean.setEventEnum(OtherEventEnum.ROLL); } else if (command.equalsIgnoreCase(NudoCCUtil.GET_USER_ID_COMMAND)) { bean.setEventEnum(OtherEventEnum.GET_USER_ID); } else if (command.equals(NudoCCUtil.LEAVE_COMMAND)) { bean.setEventEnum(OtherEventEnum.LEAVE); } else if (command.equals(NudoCCUtil.WHOAMI_COMMAND)) { bean.setEventEnum(OtherEventEnum.WHOAMI); } else if (pattern.matcher(command.toLowerCase()).matches()) { bean.setEventEnum(OtherEventEnum.WCL_USER); } else if (command.indexOf(NudoCCUtil.IMG1_COMMAND) != -1) { bean.setEventEnum(OtherEventEnum.IMG1); } else if (command.equals(NudoCCUtil.USER_ROLL_START_COMMAND)) { bean.setEventEnum(OtherEventEnum.USER_ROLL_START); } else if (command.equals(NudoCCUtil.USER_ROLL_END_COMMAND)) { bean.setEventEnum(OtherEventEnum.USER_ROLL_END); } else if (command.equals(NudoCCUtil.EMOJI_COMMAND)) { bean.setEventEnum(OtherEventEnum.EMOJI); } else if (command.startsWith(NudoCCUtil.PARROT_COMMAND)) { bean.setEventEnum(OtherEventEnum.PARROT); } else { bean.setEventEnum(OtherEventEnum.TALKING); } return bean; } /** * 根據request傳來的command回傳message * * @param event * @return */ @Override public Message processCommand(MessageEvent<TextMessageContent> event) { String command = event.getMessage().getText(); String senderId = event.getSource().getSenderId(); String userId = event.getSource().getUserId(); CommandBean commandBean = this.genCommandBean(command, senderId, userId); if (commandBean == null) { return null; } if (commandBean instanceof WoWCommandBean) { return processWoWCommand((WoWCommandBean)commandBean); } 
if (commandBean instanceof IrolCommandBean) { return processIrolCommand((IrolCommandBean)commandBean); } if (commandBean instanceof OtherCommandBean) { return processOtherCommand((OtherCommandBean)commandBean); } return null; } /** * * @param commandBean * @return */ private Message processOtherCommand(OtherCommandBean commandBean) { //other command if (StringUtils.isNotBlank(commandBean.getErrorMsg())) { return new TextMessage(commandBean.getErrorMsg()); } else { switch (commandBean.getEventEnum()) { case ROLL: return this.getRollMessage(commandBean.getCommand().toLowerCase().replace(NudoCCUtil.ROLL_COMMAND, StringUtils.EMPTY), commandBean.getSenderId()); case GET_USER_ID: return new TextMessage(NudoCCUtil.codeMessage("OTR001", commandBean.getSenderId(), commandBean.getUserId())); case LEAVE: leave(commandBean.getSenderId()); return null; case WHOAMI: return getWoWNameById(commandBean.getUserId()); case WCL_USER: String[] array = commandBean.getCommand().split(NudoCCUtil.codeMessage("OTR002")); return getCharacterWCLByUserId(array[0], array[1], commandBean.getUserId()); case IMG1: return findStickerMessage("3", "181"); case TALKING: return processUserTalk(commandBean.getCommand(), commandBean.getUserId()); case USER_ROLL_START: return updateUserRoll(commandBean.getSenderId(), true); case USER_ROLL_END: return updateUserRoll(commandBean.getSenderId(), false); case EMOJI: return getEmojiMessage(); case PARROT: return getParrotImage(commandBean.getCommand().replace(NudoCCUtil.PARROT_COMMAND, StringUtils.EMPTY)); default: return null; } } } /** * * @param msg * @return */ private Message getParrotImage(String msg) { LOG.info("getParrotImage msg=" + msg); try { msg = URLEncoder.encode(msg, "UTF-8"); } catch (UnsupportedEncodingException e) { LOG.error(e.getMessage()); return null; } String img = System.getenv("ROOT_PATH") + "/API/parrot/" + msg; LOG.info("getParrotImage path=" + img); return new ImageMessage(img, img); } /** * get emoji message * * @return */ private 
TextMessage getEmojiMessage() { return new TextMessage(NudoCCUtil.codeMessage("OTR007")); } /** * * @param senderId * @param status * @return */ private Message updateUserRoll(String senderId, boolean status) { UserImgFunc userImgFunc = userImgFuncDao.findOne(senderId); if (userImgFunc == null) { userImgFunc = new UserImgFunc(); userImgFunc.setLineId(senderId); } userImgFunc.setStatus(status); userImgFuncDao.save(userImgFunc); return new TextMessage("hs..hs.."); } /** * process wow command * * @param commandBean * @return */ @Override public Message processWoWCommand(WoWCommandBean commandBean) { //wow command if (StringUtils.isNotBlank(commandBean.getErrorMsg())) { return new TextMessage(commandBean.getErrorMsg()); } else { switch (commandBean.getEventEnum()) { case HELP: return wowService.getHelp(); case PROFILE: return wowService.buildCharacterTemplate(commandBean.getName()); case IMG: return wowService.getWoWCharacterImgPath(commandBean.getName()); case CHARACTER_ITEM: return wowService.getWoWCharacterItems(commandBean.getName(), commandBean.getRealm()); case CHECK_ENCHANTS: return wowService.checkCharacterEnchants(commandBean.getName(), commandBean.getRealm()); case WCL: return wowService.getCharacterWCL(commandBean.getName(), commandBean.getRealm(), commandBean.getLocation(), commandBean.getMetric(), commandBean.getMode()); case MAPPING_A: return wowService.saveCharacter(commandBean.getName(), commandBean.getRealm(), commandBean.getLocation(), commandBean.getUserId()); case TEST: //TODO ... 
default: return null; } } } /** * process irol command * * @param commandBean * @return */ @Override public Message processIrolCommand(IrolCommandBean commandBean) { //irol command if (StringUtils.isNotBlank(commandBean.getErrorMsg())) { return new TextMessage(commandBean.getErrorMsg()); } else { switch (commandBean.getEventEnum()) { case OPEN: return irolService.getIrols(commandBean.getUserId()); case BATTLE: return irolService.doBattle(commandBean.getUserId(), commandBean.getIrolName()); case FIGHT: return irolService.doFight(commandBean.getUserId(), commandBean.getIrolId(), commandBean.getMonsterId()); case SKILL: return irolService.doSkill(commandBean.getUserId(), commandBean.getIrolId(), commandBean.getMonsterId(), commandBean.getSkillId()); default: return null; } } } /** * return message by talking count * * @param mesg * @param userId * @return */ private Message processUserTalk(String mesg, String userId) { if (StringUtils.isBlank(userId)) { return null; } UserTalkLevelKey key = new UserTalkLevelKey(userId, mesg); UserTalkLevel userTalkLevel = userTalkLevelDao.findOne(key); if (userTalkLevel != null) { try { userTalkLevel.setTalkCount(userTalkLevel.getTalkCount()+1); userTalkLevelDao.save(userTalkLevel); String displayName = getDisplayName(userId); switch (userTalkLevel.getTalkCount()) { case 10: return new TextMessage(NudoCCUtil.codeMessage("OTR003", displayName, mesg, mesg)); case 25: return new TextMessage(NudoCCUtil.codeMessage("OTR004", displayName, mesg, mesg)); case 50: return new TextMessage(NudoCCUtil.codeMessage("OTR005", displayName, mesg, mesg)); case 99: return new TextMessage(NudoCCUtil.codeMessage("OTR006", displayName, mesg, mesg)); default: break; } } catch (Exception e) { LOG.error("processUserTalk error!", e); } } else { userTalkLevel = new UserTalkLevel(userId, mesg); userTalkLevel.setTalkCount(1); userTalkLevelDao.save(userTalkLevel); } return null; } /** * * @param mode * @param metric * @param userId * @return */ private Message 
getCharacterWCLByUserId(String mode, String metric, String userId) { if (StringUtils.isBlank(userId)) { return new TextMessage(NudoCCUtil.codeMessage("COM001")); } try { WoWCharacterMapping po = wowCharacterMappingDao.findOne(userId); if (po == null) { return new TextMessage(NudoCCUtil.codeMessage("COM002")); } return wowService.getCharacterWCL(po.getName(), po.getRealm(), po.getLocation(), metric, mode); } catch (Exception e) { return null; } } /** * get mapping wow name by line id * * @param userId * @return */ private Message getWoWNameById(String userId) { if (StringUtils.isBlank(userId)) { return new TextMessage(NudoCCUtil.codeMessage("COM001")); } try { WoWCharacterMapping po = wowCharacterMappingDao.findOne(userId); if (po != null) { return new TextMessage(NudoCCUtil.codeMessage("WOW011", po.getName(), po.getRealm())); } else { return new TextMessage(NudoCCUtil.codeMessage("ERR006")); } } catch (Exception e) { return new TextMessage(NudoCCUtil.codeMessage("ERR007")); } } /** * leave group * * @param groupId */ private void leave(String groupId) { LOG.info("leaveGroup BEGIN"); lineMessagingClient.leaveGroup(groupId); LOG.info("leaveGroup END"); } /** * Roll * * @param command * @return */ private Message getRollMessage(String command, String senderId) { if (StringUtils.isNotBlank(command) && command.indexOf(" ") == 0) { String[] scopes = command.trim().split("-"); if (scopes.length != 2) { return new TextMessage(NudoCCUtil.codeMessage("ERR011")); } else { // validate start & end int start, end = 0; try { start = Integer.parseInt(scopes[0]); end = Integer.parseInt(scopes[1]); if (start > end) { return new TextMessage(NudoCCUtil.codeMessage("ERR012")); } if (end > 99999) { return new TextMessage(NudoCCUtil.codeMessage("ERR013")); } } catch (NumberFormatException e) { return new TextMessage(NudoCCUtil.codeMessage("ERR014")); } return this.getRollMessage(start, end, senderId); } } else { return this.getRollMessage(1, 100, senderId); } } /** * get roll number 
message * * @param start * @param end * @return */ private Message getRollMessage(int start, int end, String senderId) { boolean isUserRoll = isUserRoll(senderId); int point = this.probabilityControl(start, end); if (isUserRoll) { String img = System.getenv("ROOT_PATH") + "/API/img/" + point; return new ImageMessage(img, img); } else { int size = wowBossMaster.getBosses().size(); Random randBoss = new Random(); int index = randBoss.nextInt(size); String name = wowBossMaster.getBosses().get(index).getName(); return new TextMessage(NudoCCUtil.codeMessage("COM004", name, point, start, end)); } } /** * * @param senderId * @return */ private boolean isUserRoll(String senderId) { UserImgFunc userImgFunc = userImgFuncDao.findOne(senderId); return userImgFunc == null ? false : userImgFunc.isStatus(); } /** * probability point by start,end * * @param start * @param end * @return */ private int probabilityControl(int start, int end) { List<Integer> nums = new ArrayList<>(); for (int i = start; i <= end; i++) { nums.add(i); } int[] numsToGenerate = nums.stream().mapToInt(i->i).toArray(); double[] discreteProbabilities = NudoCCUtil.zipfDistribution(end-start+1); int[] result = NudoCCUtil.getIntegerDistribution(numsToGenerate, discreteProbabilities, 1); return result[0]; } }
spring-boot-cc/src/main/java/com/cc/service/impl/NudoCCServiceImpl.java
package com.cc.service.impl; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.ExecutionException; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.cc.Application; import com.cc.bean.CommandBean; import com.cc.bean.IrolCommandBean; import com.cc.bean.OtherCommandBean; import com.cc.bean.WoWCommandBean; import com.cc.dao.UserImgFuncDao; import com.cc.dao.UserTalkLevelDao; import com.cc.dao.WoWCharacterMappingDao; import com.cc.entity.UserImgFunc; import com.cc.entity.UserTalkLevel; import com.cc.entity.WoWCharacterMapping; import com.cc.entity.key.UserTalkLevelKey; import com.cc.enums.OtherEventEnum; import com.cc.service.IIrolService; import com.cc.service.INudoCCService; import com.cc.service.IWoWService; import com.cc.wow.boss.BossMaster; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.linecorp.bot.client.LineMessagingClient; import com.linecorp.bot.client.LineMessagingClientImpl; import com.linecorp.bot.client.LineMessagingService; import com.linecorp.bot.client.LineMessagingServiceBuilder; import com.linecorp.bot.model.event.MessageEvent; import com.linecorp.bot.model.event.message.TextMessageContent; import com.linecorp.bot.model.message.ImageMessage; import com.linecorp.bot.model.message.Message; import com.linecorp.bot.model.message.StickerMessage; import com.linecorp.bot.model.message.TextMessage; import com.linecorp.bot.model.profile.UserProfileResponse; import com.utils.NudoCCUtil; /** * @author Caleb.Cheng * */ @Component public class NudoCCServiceImpl implements INudoCCService { private static BossMaster wowBossMaster; private static final 
Logger LOG = LoggerFactory.getLogger(NudoCCServiceImpl.class); private LineMessagingClient lineMessagingClient; { LineMessagingService lineMessagingService = LineMessagingServiceBuilder.create(System.getenv("LINE_BOT_CHANNEL_TOKEN")).build(); lineMessagingClient = new LineMessagingClientImpl(lineMessagingService); } static { ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); try { wowBossMaster = mapper.readValue(Application.class.getResourceAsStream("/wowBoss.json"), new TypeReference<BossMaster>(){}); } catch (IOException e) { e.printStackTrace(); } } @Autowired private IWoWService wowService; @Autowired private WoWCharacterMappingDao wowCharacterMappingDao; @Autowired private IIrolService irolService; @Autowired private UserTalkLevelDao userTalkLevelDao; @Autowired private UserImgFuncDao userImgFuncDao; /** * find line sticker message * * @param packageId * @param stickerId * @return */ @Override public Message findStickerMessage(String packageId, String stickerId) { return new StickerMessage(packageId, stickerId); } /** * get line display name * * @param lineId * @return * @throws InterruptedException * @throws ExecutionException */ @Override public String getDisplayName(String lineId) throws InterruptedException, ExecutionException { UserProfileResponse userProfileResponse = lineMessagingClient.getProfile(lineId).get(); return userProfileResponse.getDisplayName(); } /** * generator command bean * * @param event * @return */ @Override public CommandBean genCommandBean(String command, String senderId, String userId) { if (StringUtils.isBlank(command)) { return null; } if (command.startsWith(NudoCCUtil.WOW_COMMAND)) { return wowService.genWoWCommandBean(command, senderId, userId); } if (irolService.isIrolCommand(command)) { return irolService.genIrolCommandBean(command, senderId, userId); } return this.genOtherCommandBean(command, senderId, userId); } /** * * @param command * @param senderId * 
@param userId * @return */ private CommandBean genOtherCommandBean(String command, String senderId, String userId) { OtherCommandBean bean = new OtherCommandBean(command, senderId, userId); Pattern pattern = Pattern.compile(NudoCCUtil.WCL_USER_COMMANDS); //other command if (command.toLowerCase().startsWith(NudoCCUtil.ROLL_COMMAND)) { bean.setEventEnum(OtherEventEnum.ROLL); } else if (command.equalsIgnoreCase(NudoCCUtil.GET_USER_ID_COMMAND)) { bean.setEventEnum(OtherEventEnum.GET_USER_ID); } else if (command.equals(NudoCCUtil.LEAVE_COMMAND)) { bean.setEventEnum(OtherEventEnum.LEAVE); } else if (command.equals(NudoCCUtil.WHOAMI_COMMAND)) { bean.setEventEnum(OtherEventEnum.WHOAMI); } else if (pattern.matcher(command.toLowerCase()).matches()) { bean.setEventEnum(OtherEventEnum.WCL_USER); } else if (command.indexOf(NudoCCUtil.IMG1_COMMAND) != -1) { bean.setEventEnum(OtherEventEnum.IMG1); } else if (command.equals(NudoCCUtil.USER_ROLL_START_COMMAND)) { bean.setEventEnum(OtherEventEnum.USER_ROLL_START); } else if (command.equals(NudoCCUtil.USER_ROLL_END_COMMAND)) { bean.setEventEnum(OtherEventEnum.USER_ROLL_END); } else if (command.equals(NudoCCUtil.EMOJI_COMMAND)) { bean.setEventEnum(OtherEventEnum.EMOJI); } else if (command.startsWith(NudoCCUtil.PARROT_COMMAND)) { bean.setEventEnum(OtherEventEnum.PARROT); } else { bean.setEventEnum(OtherEventEnum.TALKING); } return bean; } /** * 根據request傳來的command回傳message * * @param event * @return */ @Override public Message processCommand(MessageEvent<TextMessageContent> event) { String command = event.getMessage().getText(); String senderId = event.getSource().getSenderId(); String userId = event.getSource().getUserId(); CommandBean commandBean = this.genCommandBean(command, senderId, userId); if (commandBean == null) { return null; } if (commandBean instanceof WoWCommandBean) { return processWoWCommand((WoWCommandBean)commandBean); } if (commandBean instanceof IrolCommandBean) { return 
processIrolCommand((IrolCommandBean)commandBean); } if (commandBean instanceof OtherCommandBean) { return processOtherCommand((OtherCommandBean)commandBean); } return null; } /** * * @param commandBean * @return */ private Message processOtherCommand(OtherCommandBean commandBean) { //other command if (StringUtils.isNotBlank(commandBean.getErrorMsg())) { return new TextMessage(commandBean.getErrorMsg()); } else { switch (commandBean.getEventEnum()) { case ROLL: return this.getRollMessage(commandBean.getCommand().toLowerCase().replace(NudoCCUtil.ROLL_COMMAND, StringUtils.EMPTY), commandBean.getSenderId()); case GET_USER_ID: return new TextMessage(NudoCCUtil.codeMessage("OTR001", commandBean.getSenderId(), commandBean.getUserId())); case LEAVE: leave(commandBean.getSenderId()); return null; case WHOAMI: return getWoWNameById(commandBean.getUserId()); case WCL_USER: String[] array = commandBean.getCommand().split(NudoCCUtil.codeMessage("OTR002")); return getCharacterWCLByUserId(array[0], array[1], commandBean.getUserId()); case IMG1: return findStickerMessage("3", "181"); case TALKING: return processUserTalk(commandBean.getCommand(), commandBean.getUserId()); case USER_ROLL_START: return updateUserRoll(commandBean.getSenderId(), true); case USER_ROLL_END: return updateUserRoll(commandBean.getSenderId(), false); case EMOJI: return getEmojiMessage(); case PARROT: return getParrotImage(commandBean.getCommand().replace(NudoCCUtil.PARROT_COMMAND, StringUtils.EMPTY)); default: return null; } } } /** * * @param msg * @return */ private Message getParrotImage(String msg) { LOG.info("getParrotImage msg=" + msg); boolean hasCh = msg.getBytes().length != msg.length(); if (hasCh) { msg = "I'm_ameriBird!"; } else if (msg.length() > 15) { msg = "fuck_too_long"; } String img = System.getenv("ROOT_PATH") + "/API/parrot/" + msg; LOG.info("getParrotImage path=" + img); return new ImageMessage(img, img); } /** * get emoji message * * @return */ private TextMessage getEmojiMessage() { 
return new TextMessage(NudoCCUtil.codeMessage("OTR007")); } /** * * @param senderId * @param status * @return */ private Message updateUserRoll(String senderId, boolean status) { UserImgFunc userImgFunc = userImgFuncDao.findOne(senderId); if (userImgFunc == null) { userImgFunc = new UserImgFunc(); userImgFunc.setLineId(senderId); } userImgFunc.setStatus(status); userImgFuncDao.save(userImgFunc); return new TextMessage("hs..hs.."); } /** * process wow command * * @param commandBean * @return */ @Override public Message processWoWCommand(WoWCommandBean commandBean) { //wow command if (StringUtils.isNotBlank(commandBean.getErrorMsg())) { return new TextMessage(commandBean.getErrorMsg()); } else { switch (commandBean.getEventEnum()) { case HELP: return wowService.getHelp(); case PROFILE: return wowService.buildCharacterTemplate(commandBean.getName()); case IMG: return wowService.getWoWCharacterImgPath(commandBean.getName()); case CHARACTER_ITEM: return wowService.getWoWCharacterItems(commandBean.getName(), commandBean.getRealm()); case CHECK_ENCHANTS: return wowService.checkCharacterEnchants(commandBean.getName(), commandBean.getRealm()); case WCL: return wowService.getCharacterWCL(commandBean.getName(), commandBean.getRealm(), commandBean.getLocation(), commandBean.getMetric(), commandBean.getMode()); case MAPPING_A: return wowService.saveCharacter(commandBean.getName(), commandBean.getRealm(), commandBean.getLocation(), commandBean.getUserId()); case TEST: //TODO ... 
default: return null; } } } /** * process irol command * * @param commandBean * @return */ @Override public Message processIrolCommand(IrolCommandBean commandBean) { //irol command if (StringUtils.isNotBlank(commandBean.getErrorMsg())) { return new TextMessage(commandBean.getErrorMsg()); } else { switch (commandBean.getEventEnum()) { case OPEN: return irolService.getIrols(commandBean.getUserId()); case BATTLE: return irolService.doBattle(commandBean.getUserId(), commandBean.getIrolName()); case FIGHT: return irolService.doFight(commandBean.getUserId(), commandBean.getIrolId(), commandBean.getMonsterId()); case SKILL: return irolService.doSkill(commandBean.getUserId(), commandBean.getIrolId(), commandBean.getMonsterId(), commandBean.getSkillId()); default: return null; } } } /** * return message by talking count * * @param mesg * @param userId * @return */ private Message processUserTalk(String mesg, String userId) { if (StringUtils.isBlank(userId)) { return null; } UserTalkLevelKey key = new UserTalkLevelKey(userId, mesg); UserTalkLevel userTalkLevel = userTalkLevelDao.findOne(key); if (userTalkLevel != null) { try { userTalkLevel.setTalkCount(userTalkLevel.getTalkCount()+1); userTalkLevelDao.save(userTalkLevel); String displayName = getDisplayName(userId); switch (userTalkLevel.getTalkCount()) { case 10: return new TextMessage(NudoCCUtil.codeMessage("OTR003", displayName, mesg, mesg)); case 25: return new TextMessage(NudoCCUtil.codeMessage("OTR004", displayName, mesg, mesg)); case 50: return new TextMessage(NudoCCUtil.codeMessage("OTR005", displayName, mesg, mesg)); case 99: return new TextMessage(NudoCCUtil.codeMessage("OTR006", displayName, mesg, mesg)); default: break; } } catch (Exception e) { LOG.error("processUserTalk error!", e); } } else { userTalkLevel = new UserTalkLevel(userId, mesg); userTalkLevel.setTalkCount(1); userTalkLevelDao.save(userTalkLevel); } return null; } /** * * @param mode * @param metric * @param userId * @return */ private Message 
getCharacterWCLByUserId(String mode, String metric, String userId) { if (StringUtils.isBlank(userId)) { return new TextMessage(NudoCCUtil.codeMessage("COM001")); } try { WoWCharacterMapping po = wowCharacterMappingDao.findOne(userId); if (po == null) { return new TextMessage(NudoCCUtil.codeMessage("COM002")); } return wowService.getCharacterWCL(po.getName(), po.getRealm(), po.getLocation(), metric, mode); } catch (Exception e) { return null; } } /** * get mapping wow name by line id * * @param userId * @return */ private Message getWoWNameById(String userId) { if (StringUtils.isBlank(userId)) { return new TextMessage(NudoCCUtil.codeMessage("COM001")); } try { WoWCharacterMapping po = wowCharacterMappingDao.findOne(userId); if (po != null) { return new TextMessage(NudoCCUtil.codeMessage("WOW011", po.getName(), po.getRealm())); } else { return new TextMessage(NudoCCUtil.codeMessage("ERR006")); } } catch (Exception e) { return new TextMessage(NudoCCUtil.codeMessage("ERR007")); } } /** * leave group * * @param groupId */ private void leave(String groupId) { LOG.info("leaveGroup BEGIN"); lineMessagingClient.leaveGroup(groupId); LOG.info("leaveGroup END"); } /** * Roll * * @param command * @return */ private Message getRollMessage(String command, String senderId) { if (StringUtils.isNotBlank(command) && command.indexOf(" ") == 0) { String[] scopes = command.trim().split("-"); if (scopes.length != 2) { return new TextMessage(NudoCCUtil.codeMessage("ERR011")); } else { // validate start & end int start, end = 0; try { start = Integer.parseInt(scopes[0]); end = Integer.parseInt(scopes[1]); if (start > end) { return new TextMessage(NudoCCUtil.codeMessage("ERR012")); } if (end > 99999) { return new TextMessage(NudoCCUtil.codeMessage("ERR013")); } } catch (NumberFormatException e) { return new TextMessage(NudoCCUtil.codeMessage("ERR014")); } return this.getRollMessage(start, end, senderId); } } else { return this.getRollMessage(1, 100, senderId); } } /** * get roll number 
message * * @param start * @param end * @return */ private Message getRollMessage(int start, int end, String senderId) { boolean isUserRoll = isUserRoll(senderId); int point = this.probabilityControl(start, end); if (isUserRoll) { String img = System.getenv("ROOT_PATH") + "/API/img/" + point; return new ImageMessage(img, img); } else { int size = wowBossMaster.getBosses().size(); Random randBoss = new Random(); int index = randBoss.nextInt(size); String name = wowBossMaster.getBosses().get(index).getName(); return new TextMessage(NudoCCUtil.codeMessage("COM004", name, point, start, end)); } } /** * * @param senderId * @return */ private boolean isUserRoll(String senderId) { UserImgFunc userImgFunc = userImgFuncDao.findOne(senderId); return userImgFunc == null ? false : userImgFunc.isStatus(); } /** * probability point by start,end * * @param start * @param end * @return */ private int probabilityControl(int start, int end) { List<Integer> nums = new ArrayList<>(); for (int i = start; i <= end; i++) { nums.add(i); } int[] numsToGenerate = nums.stream().mapToInt(i->i).toArray(); double[] discreteProbabilities = NudoCCUtil.zipfDistribution(end-start+1); int[] result = NudoCCUtil.getIntegerDistribution(numsToGenerate, discreteProbabilities, 1); return result[0]; } }
encode
spring-boot-cc/src/main/java/com/cc/service/impl/NudoCCServiceImpl.java
encode
Java
apache-2.0
0e3b623f6681429985b0d2f3ac025ed32b89346e
0
oplinkoms/onos,gkatsikas/onos,oplinkoms/onos,gkatsikas/onos,opennetworkinglab/onos,oplinkoms/onos,gkatsikas/onos,gkatsikas/onos,gkatsikas/onos,oplinkoms/onos,opennetworkinglab/onos,opennetworkinglab/onos,opennetworkinglab/onos,oplinkoms/onos,opennetworkinglab/onos,opennetworkinglab/onos,gkatsikas/onos,oplinkoms/onos,oplinkoms/onos
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.mastership.impl; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.onlab.util.KryoNamespace; import org.onosproject.cluster.ClusterService; import org.onosproject.cluster.Leadership; import org.onosproject.cluster.LeadershipAdminService; import org.onosproject.cluster.LeadershipEvent; import org.onosproject.cluster.LeadershipEventListener; import org.onosproject.cluster.LeadershipService; import org.onosproject.cluster.NodeId; import org.onosproject.cluster.RoleInfo; import org.onosproject.mastership.MastershipEvent; import org.onosproject.mastership.MastershipInfo; import org.onosproject.mastership.MastershipStore; import org.onosproject.mastership.MastershipStoreDelegate; import org.onosproject.mastership.MastershipTerm; import org.onosproject.net.DeviceId; import org.onosproject.net.MastershipRole; import org.onosproject.store.AbstractStore; import org.onosproject.store.cluster.messaging.ClusterCommunicationService; import org.onosproject.store.cluster.messaging.MessageSubject; import org.onosproject.store.serializers.KryoNamespaces; import org.onosproject.store.service.Serializer; import org.osgi.service.component.annotations.Activate; import org.osgi.service.component.annotations.Component; import org.osgi.service.component.annotations.Deactivate; import 
org.osgi.service.component.annotations.Reference; import org.osgi.service.component.annotations.ReferenceCardinality; import org.slf4j.Logger; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkArgument; import static org.onlab.util.Tools.groupedThreads; import static org.onosproject.mastership.MastershipEvent.Type.BACKUPS_CHANGED; import static org.onosproject.mastership.MastershipEvent.Type.MASTER_CHANGED; import static org.onosproject.mastership.MastershipEvent.Type.RESTORED; import static org.onosproject.mastership.MastershipEvent.Type.SUSPENDED; import static org.slf4j.LoggerFactory.getLogger; /** * Implementation of the MastershipStore on top of Leadership Service. 
*/ @Component(immediate = true, service = MastershipStore.class) public class ConsistentDeviceMastershipStore extends AbstractStore<MastershipEvent, MastershipStoreDelegate> implements MastershipStore { private final Logger log = getLogger(getClass()); @Reference(cardinality = ReferenceCardinality.MANDATORY) protected LeadershipService leadershipService; @Reference(cardinality = ReferenceCardinality.MANDATORY) protected LeadershipAdminService leadershipAdminService; @Reference(cardinality = ReferenceCardinality.MANDATORY) protected ClusterService clusterService; @Reference(cardinality = ReferenceCardinality.MANDATORY) protected ClusterCommunicationService clusterCommunicator; private NodeId localNodeId; private static final MessageSubject ROLE_RELINQUISH_SUBJECT = new MessageSubject("mastership-store-device-role-relinquish"); private static final String DEVICE_MASTERSHIP_TOPIC_PREFIX = "device-mastership:"; private static final Pattern DEVICE_MASTERSHIP_TOPIC_PATTERN = Pattern.compile("^" + DEVICE_MASTERSHIP_TOPIC_PREFIX + "(.*)"); private ExecutorService eventHandler; private ExecutorService messageHandlingExecutor; private ScheduledExecutorService transferExecutor; private final LeadershipEventListener leadershipEventListener = new InternalDeviceMastershipEventListener(); private static final String NODE_ID_NULL = "Node ID cannot be null"; private static final String DEVICE_ID_NULL = "Device ID cannot be null"; private static final int WAIT_BEFORE_MASTERSHIP_HANDOFF_MILLIS = 3000; public static final Serializer SERIALIZER = Serializer.using( KryoNamespace.newBuilder() .register(KryoNamespaces.API) .register(MastershipRole.class) .register(MastershipEvent.class) .register(MastershipEvent.Type.class) .build("MastershipStore")); @Activate public void activate() { eventHandler = Executors.newSingleThreadExecutor( groupedThreads("onos/store/device/mastership", "event-handler", log)); messageHandlingExecutor = Executors.newSingleThreadExecutor( 
groupedThreads("onos/store/device/mastership", "message-handler", log)); transferExecutor = Executors.newSingleThreadScheduledExecutor( groupedThreads("onos/store/device/mastership", "mastership-transfer-executor", log)); clusterCommunicator.addSubscriber(ROLE_RELINQUISH_SUBJECT, SERIALIZER::decode, this::relinquishLocalRole, SERIALIZER::encode, messageHandlingExecutor); localNodeId = clusterService.getLocalNode().id(); leadershipService.addListener(leadershipEventListener); log.info("Started"); } @Deactivate public void deactivate() { clusterCommunicator.removeSubscriber(ROLE_RELINQUISH_SUBJECT); leadershipService.removeListener(leadershipEventListener); messageHandlingExecutor.shutdown(); transferExecutor.shutdown(); eventHandler.shutdown(); log.info("Stopped"); } @Override public CompletableFuture<MastershipRole> requestRole(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); Leadership leadership = leadershipService.runForLeadership(leadershipTopic); NodeId leader = leadership == null ? null : leadership.leaderNodeId(); List<NodeId> candidates = leadership == null ? ImmutableList.of() : ImmutableList.copyOf(leadership.candidates()); MastershipRole role = Objects.equals(localNodeId, leader) ? MastershipRole.MASTER : candidates.contains(localNodeId) ? MastershipRole.STANDBY : MastershipRole.NONE; return CompletableFuture.completedFuture(role); } @Override public MastershipRole getRole(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); Leadership leadership = leadershipService.getLeadership(leadershipTopic); NodeId leader = leadership == null ? null : leadership.leaderNodeId(); List<NodeId> candidates = leadership == null ? ImmutableList.of() : ImmutableList.copyOf(leadership.candidates()); return Objects.equals(nodeId, leader) ? 
MastershipRole.MASTER : candidates.contains(nodeId) ? MastershipRole.STANDBY : MastershipRole.NONE; } @Override public NodeId getMaster(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); return leadershipService.getLeader(createDeviceMastershipTopic(deviceId)); } @Override public RoleInfo getNodes(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); MastershipInfo mastership = getMastership(deviceId); return new RoleInfo(mastership.master().orElse(null), mastership.backups()); } @Override public MastershipInfo getMastership(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); Leadership leadership = leadershipService.getLeadership(createDeviceMastershipTopic(deviceId)); return buildMastershipFromLeadership(leadership); } @Override public Set<DeviceId> getDevices(NodeId nodeId) { checkArgument(nodeId != null, NODE_ID_NULL); // FIXME This result contains REMOVED device. // MastershipService cannot listen to DeviceEvent to GC removed topic, // since DeviceManager depend on it. // Reference count, etc. at LeadershipService layer? 
return leadershipService .ownedTopics(nodeId) .stream() .filter(this::isDeviceMastershipTopic) .map(this::extractDeviceIdFromTopic) .collect(Collectors.toSet()); } @Override public CompletableFuture<MastershipEvent> setMaster(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); if (leadershipAdminService.promoteToTopOfCandidateList(leadershipTopic, nodeId)) { transferExecutor.schedule(() -> leadershipAdminService.transferLeadership(leadershipTopic, nodeId), WAIT_BEFORE_MASTERSHIP_HANDOFF_MILLIS, TimeUnit.MILLISECONDS); } return CompletableFuture.completedFuture(null); } @Override public MastershipTerm getTermFor(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); Leadership leadership = leadershipService.getLeadership(leadershipTopic); return leadership != null && leadership.leaderNodeId() != null ? MastershipTerm.of(leadership.leaderNodeId(), leadership.leader().term()) : null; } @Override public CompletableFuture<MastershipEvent> setStandby(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); NodeId currentMaster = getMaster(deviceId); if (!nodeId.equals(currentMaster)) { return CompletableFuture.completedFuture(null); } String leadershipTopic = createDeviceMastershipTopic(deviceId); List<NodeId> candidates = leadershipService.getCandidates(leadershipTopic); NodeId newMaster = candidates.stream() .filter(candidate -> !Objects.equals(nodeId, candidate)) .findFirst() .orElse(null); log.info("Transitioning to role {} for {}. Next master: {}", newMaster != null ? 
MastershipRole.STANDBY : MastershipRole.NONE, deviceId, newMaster); if (newMaster != null) { return setMaster(newMaster, deviceId); } return relinquishRole(nodeId, deviceId); } @Override public CompletableFuture<MastershipEvent> relinquishRole(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); if (nodeId.equals(localNodeId)) { return relinquishLocalRole(deviceId); } log.debug("Forwarding request to relinquish " + "role for device {} to {}", deviceId, nodeId); return clusterCommunicator.sendAndReceive( deviceId, ROLE_RELINQUISH_SUBJECT, SERIALIZER::encode, SERIALIZER::decode, nodeId); } private CompletableFuture<MastershipEvent> relinquishLocalRole(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); if (!leadershipService.getCandidates(leadershipTopic).contains(localNodeId)) { return CompletableFuture.completedFuture(null); } MastershipEvent.Type eventType = localNodeId.equals(leadershipService.getLeader(leadershipTopic)) ? MastershipEvent.Type.MASTER_CHANGED : MastershipEvent.Type.BACKUPS_CHANGED; leadershipService.withdraw(leadershipTopic); return CompletableFuture.completedFuture(new MastershipEvent(eventType, deviceId, getMastership(deviceId))); } @Override public void relinquishAllRole(NodeId nodeId) { // Noop. LeadershipService already takes care of detecting and purging stale locks. 
} private MastershipInfo buildMastershipFromLeadership(Leadership leadership) { ImmutableMap.Builder<NodeId, MastershipRole> builder = ImmutableMap.builder(); if (leadership.leaderNodeId() != null) { builder.put(leadership.leaderNodeId(), MastershipRole.MASTER); } leadership.candidates().stream() .filter(nodeId -> !Objects.equals(leadership.leaderNodeId(), nodeId)) .forEach(nodeId -> builder.put(nodeId, MastershipRole.STANDBY)); clusterService.getNodes().stream() .filter(node -> !Objects.equals(leadership.leaderNodeId(), node.id())) .filter(node -> !leadership.candidates().contains(node.id())) .forEach(node -> builder.put(node.id(), MastershipRole.NONE)); return new MastershipInfo( leadership.leader() != null ? leadership.leader().term() : 0, leadership.leader() != null ? Optional.of(leadership.leader().nodeId()) : Optional.empty(), builder.build()); } private class InternalDeviceMastershipEventListener implements LeadershipEventListener { @Override public boolean isRelevant(LeadershipEvent event) { Leadership leadership = event.subject(); return isDeviceMastershipTopic(leadership.topic()); } @Override public void event(LeadershipEvent event) { eventHandler.execute(() -> handleEvent(event)); } private void handleEvent(LeadershipEvent event) { Leadership leadership = event.subject(); DeviceId deviceId = extractDeviceIdFromTopic(leadership.topic()); MastershipInfo mastershipInfo = event.type() != LeadershipEvent.Type.SERVICE_DISRUPTED ? 
buildMastershipFromLeadership(event.subject()) : new MastershipInfo(); switch (event.type()) { case LEADER_AND_CANDIDATES_CHANGED: notifyDelegate(new MastershipEvent(BACKUPS_CHANGED, deviceId, mastershipInfo)); notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, mastershipInfo)); break; case LEADER_CHANGED: notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, mastershipInfo)); break; case CANDIDATES_CHANGED: notifyDelegate(new MastershipEvent(BACKUPS_CHANGED, deviceId, mastershipInfo)); break; case SERVICE_DISRUPTED: notifyDelegate(new MastershipEvent(SUSPENDED, deviceId, mastershipInfo)); break; case SERVICE_RESTORED: notifyDelegate(new MastershipEvent(RESTORED, deviceId, mastershipInfo)); break; default: return; } } } private String createDeviceMastershipTopic(DeviceId deviceId) { return String.format("%s%s", DEVICE_MASTERSHIP_TOPIC_PREFIX, deviceId.toString()); } private DeviceId extractDeviceIdFromTopic(String topic) { Matcher m = DEVICE_MASTERSHIP_TOPIC_PATTERN.matcher(topic); if (m.matches()) { return DeviceId.deviceId(m.group(1)); } else { throw new IllegalArgumentException("Invalid device mastership topic: " + topic); } } private boolean isDeviceMastershipTopic(String topic) { Matcher m = DEVICE_MASTERSHIP_TOPIC_PATTERN.matcher(topic); return m.matches(); } }
core/store/dist/src/main/java/org/onosproject/store/mastership/impl/ConsistentDeviceMastershipStore.java
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.mastership.impl; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.onlab.util.KryoNamespace; import org.onosproject.cluster.ClusterService; import org.onosproject.cluster.Leadership; import org.onosproject.cluster.LeadershipAdminService; import org.onosproject.cluster.LeadershipEvent; import org.onosproject.cluster.LeadershipEventListener; import org.onosproject.cluster.LeadershipService; import org.onosproject.cluster.NodeId; import org.onosproject.cluster.RoleInfo; import org.onosproject.mastership.MastershipEvent; import org.onosproject.mastership.MastershipInfo; import org.onosproject.mastership.MastershipStore; import org.onosproject.mastership.MastershipStoreDelegate; import org.onosproject.mastership.MastershipTerm; import org.onosproject.net.DeviceId; import org.onosproject.net.MastershipRole; import org.onosproject.store.AbstractStore; import org.onosproject.store.cluster.messaging.ClusterCommunicationService; import org.onosproject.store.cluster.messaging.MessageSubject; import org.onosproject.store.serializers.KryoNamespaces; import org.onosproject.store.service.Serializer; import org.osgi.service.component.annotations.Activate; import org.osgi.service.component.annotations.Component; import org.osgi.service.component.annotations.Deactivate; import 
org.osgi.service.component.annotations.Reference; import org.osgi.service.component.annotations.ReferenceCardinality; import org.slf4j.Logger; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkArgument; import static org.onlab.util.Tools.groupedThreads; import static org.onosproject.mastership.MastershipEvent.Type.BACKUPS_CHANGED; import static org.onosproject.mastership.MastershipEvent.Type.MASTER_CHANGED; import static org.onosproject.mastership.MastershipEvent.Type.RESTORED; import static org.onosproject.mastership.MastershipEvent.Type.SUSPENDED; import static org.slf4j.LoggerFactory.getLogger; /** * Implementation of the MastershipStore on top of Leadership Service. 
*/ @Component(immediate = true, service = MastershipStore.class) public class ConsistentDeviceMastershipStore extends AbstractStore<MastershipEvent, MastershipStoreDelegate> implements MastershipStore { private final Logger log = getLogger(getClass()); @Reference(cardinality = ReferenceCardinality.MANDATORY) protected LeadershipService leadershipService; @Reference(cardinality = ReferenceCardinality.MANDATORY) protected LeadershipAdminService leadershipAdminService; @Reference(cardinality = ReferenceCardinality.MANDATORY) protected ClusterService clusterService; @Reference(cardinality = ReferenceCardinality.MANDATORY) protected ClusterCommunicationService clusterCommunicator; private NodeId localNodeId; private static final MessageSubject ROLE_RELINQUISH_SUBJECT = new MessageSubject("mastership-store-device-role-relinquish"); private static final Pattern DEVICE_MASTERSHIP_TOPIC_PATTERN = Pattern.compile("^device:(.*)"); private ExecutorService eventHandler; private ExecutorService messageHandlingExecutor; private ScheduledExecutorService transferExecutor; private final LeadershipEventListener leadershipEventListener = new InternalDeviceMastershipEventListener(); private static final String NODE_ID_NULL = "Node ID cannot be null"; private static final String DEVICE_ID_NULL = "Device ID cannot be null"; private static final int WAIT_BEFORE_MASTERSHIP_HANDOFF_MILLIS = 3000; public static final Serializer SERIALIZER = Serializer.using( KryoNamespace.newBuilder() .register(KryoNamespaces.API) .register(MastershipRole.class) .register(MastershipEvent.class) .register(MastershipEvent.Type.class) .build("MastershipStore")); @Activate public void activate() { eventHandler = Executors.newSingleThreadExecutor( groupedThreads("onos/store/device/mastership", "event-handler", log)); messageHandlingExecutor = Executors.newSingleThreadExecutor( groupedThreads("onos/store/device/mastership", "message-handler", log)); transferExecutor = Executors.newSingleThreadScheduledExecutor( 
groupedThreads("onos/store/device/mastership", "mastership-transfer-executor", log)); clusterCommunicator.addSubscriber(ROLE_RELINQUISH_SUBJECT, SERIALIZER::decode, this::relinquishLocalRole, SERIALIZER::encode, messageHandlingExecutor); localNodeId = clusterService.getLocalNode().id(); leadershipService.addListener(leadershipEventListener); log.info("Started"); } @Deactivate public void deactivate() { clusterCommunicator.removeSubscriber(ROLE_RELINQUISH_SUBJECT); leadershipService.removeListener(leadershipEventListener); messageHandlingExecutor.shutdown(); transferExecutor.shutdown(); eventHandler.shutdown(); log.info("Stopped"); } @Override public CompletableFuture<MastershipRole> requestRole(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); Leadership leadership = leadershipService.runForLeadership(leadershipTopic); NodeId leader = leadership == null ? null : leadership.leaderNodeId(); List<NodeId> candidates = leadership == null ? ImmutableList.of() : ImmutableList.copyOf(leadership.candidates()); MastershipRole role = Objects.equals(localNodeId, leader) ? MastershipRole.MASTER : candidates.contains(localNodeId) ? MastershipRole.STANDBY : MastershipRole.NONE; return CompletableFuture.completedFuture(role); } @Override public MastershipRole getRole(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); Leadership leadership = leadershipService.getLeadership(leadershipTopic); NodeId leader = leadership == null ? null : leadership.leaderNodeId(); List<NodeId> candidates = leadership == null ? ImmutableList.of() : ImmutableList.copyOf(leadership.candidates()); return Objects.equals(nodeId, leader) ? MastershipRole.MASTER : candidates.contains(nodeId) ? 
MastershipRole.STANDBY : MastershipRole.NONE; } @Override public NodeId getMaster(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); return leadershipService.getLeader(createDeviceMastershipTopic(deviceId)); } @Override public RoleInfo getNodes(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); MastershipInfo mastership = getMastership(deviceId); return new RoleInfo(mastership.master().orElse(null), mastership.backups()); } @Override public MastershipInfo getMastership(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); Leadership leadership = leadershipService.getLeadership(createDeviceMastershipTopic(deviceId)); return buildMastershipFromLeadership(leadership); } @Override public Set<DeviceId> getDevices(NodeId nodeId) { checkArgument(nodeId != null, NODE_ID_NULL); // FIXME This result contains REMOVED device. // MastershipService cannot listen to DeviceEvent to GC removed topic, // since DeviceManager depend on it. // Reference count, etc. at LeadershipService layer? 
return leadershipService .ownedTopics(nodeId) .stream() .filter(this::isDeviceMastershipTopic) .map(this::extractDeviceIdFromTopic) .collect(Collectors.toSet()); } @Override public CompletableFuture<MastershipEvent> setMaster(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); if (leadershipAdminService.promoteToTopOfCandidateList(leadershipTopic, nodeId)) { transferExecutor.schedule(() -> leadershipAdminService.transferLeadership(leadershipTopic, nodeId), WAIT_BEFORE_MASTERSHIP_HANDOFF_MILLIS, TimeUnit.MILLISECONDS); } return CompletableFuture.completedFuture(null); } @Override public MastershipTerm getTermFor(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); Leadership leadership = leadershipService.getLeadership(leadershipTopic); return leadership != null && leadership.leaderNodeId() != null ? MastershipTerm.of(leadership.leaderNodeId(), leadership.leader().term()) : null; } @Override public CompletableFuture<MastershipEvent> setStandby(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); NodeId currentMaster = getMaster(deviceId); if (!nodeId.equals(currentMaster)) { return CompletableFuture.completedFuture(null); } String leadershipTopic = createDeviceMastershipTopic(deviceId); List<NodeId> candidates = leadershipService.getCandidates(leadershipTopic); NodeId newMaster = candidates.stream() .filter(candidate -> !Objects.equals(nodeId, candidate)) .findFirst() .orElse(null); log.info("Transitioning to role {} for {}. Next master: {}", newMaster != null ? 
MastershipRole.STANDBY : MastershipRole.NONE, deviceId, newMaster); if (newMaster != null) { return setMaster(newMaster, deviceId); } return relinquishRole(nodeId, deviceId); } @Override public CompletableFuture<MastershipEvent> relinquishRole(NodeId nodeId, DeviceId deviceId) { checkArgument(nodeId != null, NODE_ID_NULL); checkArgument(deviceId != null, DEVICE_ID_NULL); if (nodeId.equals(localNodeId)) { return relinquishLocalRole(deviceId); } log.debug("Forwarding request to relinquish " + "role for device {} to {}", deviceId, nodeId); return clusterCommunicator.sendAndReceive( deviceId, ROLE_RELINQUISH_SUBJECT, SERIALIZER::encode, SERIALIZER::decode, nodeId); } private CompletableFuture<MastershipEvent> relinquishLocalRole(DeviceId deviceId) { checkArgument(deviceId != null, DEVICE_ID_NULL); String leadershipTopic = createDeviceMastershipTopic(deviceId); if (!leadershipService.getCandidates(leadershipTopic).contains(localNodeId)) { return CompletableFuture.completedFuture(null); } MastershipEvent.Type eventType = localNodeId.equals(leadershipService.getLeader(leadershipTopic)) ? MastershipEvent.Type.MASTER_CHANGED : MastershipEvent.Type.BACKUPS_CHANGED; leadershipService.withdraw(leadershipTopic); return CompletableFuture.completedFuture(new MastershipEvent(eventType, deviceId, getMastership(deviceId))); } @Override public void relinquishAllRole(NodeId nodeId) { // Noop. LeadershipService already takes care of detecting and purging stale locks. 
} private MastershipInfo buildMastershipFromLeadership(Leadership leadership) { ImmutableMap.Builder<NodeId, MastershipRole> builder = ImmutableMap.builder(); if (leadership.leaderNodeId() != null) { builder.put(leadership.leaderNodeId(), MastershipRole.MASTER); } leadership.candidates().stream() .filter(nodeId -> !Objects.equals(leadership.leaderNodeId(), nodeId)) .forEach(nodeId -> builder.put(nodeId, MastershipRole.STANDBY)); clusterService.getNodes().stream() .filter(node -> !Objects.equals(leadership.leaderNodeId(), node.id())) .filter(node -> !leadership.candidates().contains(node.id())) .forEach(node -> builder.put(node.id(), MastershipRole.NONE)); return new MastershipInfo( leadership.leader() != null ? leadership.leader().term() : 0, leadership.leader() != null ? Optional.of(leadership.leader().nodeId()) : Optional.empty(), builder.build()); } private class InternalDeviceMastershipEventListener implements LeadershipEventListener { @Override public boolean isRelevant(LeadershipEvent event) { Leadership leadership = event.subject(); return isDeviceMastershipTopic(leadership.topic()); } @Override public void event(LeadershipEvent event) { eventHandler.execute(() -> handleEvent(event)); } private void handleEvent(LeadershipEvent event) { Leadership leadership = event.subject(); DeviceId deviceId = extractDeviceIdFromTopic(leadership.topic()); MastershipInfo mastershipInfo = event.type() != LeadershipEvent.Type.SERVICE_DISRUPTED ? 
buildMastershipFromLeadership(event.subject()) : new MastershipInfo(); switch (event.type()) { case LEADER_AND_CANDIDATES_CHANGED: notifyDelegate(new MastershipEvent(BACKUPS_CHANGED, deviceId, mastershipInfo)); notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, mastershipInfo)); break; case LEADER_CHANGED: notifyDelegate(new MastershipEvent(MASTER_CHANGED, deviceId, mastershipInfo)); break; case CANDIDATES_CHANGED: notifyDelegate(new MastershipEvent(BACKUPS_CHANGED, deviceId, mastershipInfo)); break; case SERVICE_DISRUPTED: notifyDelegate(new MastershipEvent(SUSPENDED, deviceId, mastershipInfo)); break; case SERVICE_RESTORED: notifyDelegate(new MastershipEvent(RESTORED, deviceId, mastershipInfo)); break; default: return; } } } private String createDeviceMastershipTopic(DeviceId deviceId) { return String.format("device:%s", deviceId.toString()); } private DeviceId extractDeviceIdFromTopic(String topic) { Matcher m = DEVICE_MASTERSHIP_TOPIC_PATTERN.matcher(topic); if (m.matches()) { return DeviceId.deviceId(m.group(1)); } else { throw new IllegalArgumentException("Invalid device mastership topic: " + topic); } } private boolean isDeviceMastershipTopic(String topic) { Matcher m = DEVICE_MASTERSHIP_TOPIC_PATTERN.matcher(topic); return m.matches(); } }
Reduce the chance of apps triggering fake mastership events The GeneralDeviceProvider works with device IDs with prefix "device:", which is the same leadership topic prefix used by the Mastership service. This caused an issue when any app was creating leadership contests with topic deviceId.toString() (e.g. XConnectManager, DefaultRoutingHandler, etc), as the resulting leadership events were picked up by the mastership service and propagated, because of the "device:" prefix. This patch minimizes the occurrence of such an issue by choosing a more specific leadership topic prefix for the mastership service. However, the right solution would be to add isolation of leadership contests between different services/apps. Change-Id: I333fd9796a66bb4ca04cd2facd337ac57a2947b2
core/store/dist/src/main/java/org/onosproject/store/mastership/impl/ConsistentDeviceMastershipStore.java
Reduce the chance of apps triggering fake mastership events
Java
apache-2.0
6f104dfca25c1a31b49da86701a2c82f6a7b2ece
0
nengxu/OrientDB,vivosys/orientdb,vivosys/orientdb,nengxu/OrientDB,vivosys/orientdb,vivosys/orientdb,nengxu/OrientDB,nengxu/OrientDB
package com.orientechnologies.orient.test.database.speed;

import java.util.Set;

import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import com.orientechnologies.orient.core.db.graph.OGraphDatabase;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.record.impl.ODocument;

/**
 * Speed test that builds a "super node": one vertex connected by MAX outgoing
 * edges, then reads all edges back and verifies the target vertex ids.
 * Progress and timing are reported every 100k operations.
 */
public class GraphDatabaseSuperNodeSpeedTest {
  private static final String DEFAULT_DB_URL = "local:database/graphtest";
  private static final String DEFAULT_DB_USER = "admin";
  private static final String DEFAULT_DB_PASSWORD = "admin";
  private static final int MAX = 1000000;

  private OGraphDatabase database;

  @BeforeClass
  public void setUpClass() {
    // Recreate the database from scratch so timings are not skewed by leftover data.
    database = new OGraphDatabase(DEFAULT_DB_URL);
    if (database.exists())
      database.delete();
    database.create();
    database.close();
    database.open(DEFAULT_DB_USER, DEFAULT_DB_PASSWORD);
  }

  @AfterClass
  public void tearDownClass() {
    database.close();
  }

  @Test
  public void saveEdges() {
    database.declareIntent(new OIntentMassiveInsert());

    ODocument v = database.createVertex();
    v.field("name", "superNode");

    // insertBegin tracks the whole insertion phase; begin tracks each 100k segment.
    long insertBegin = System.currentTimeMillis();
    long begin = insertBegin;
    for (int i = 1; i <= MAX; ++i) {
      database.createEdge(v, database.createVertex().field("id", i)).save();
      if (i % 100000 == 0) {
        final long now = System.currentTimeMillis();
        System.out.printf("\nInserted %d edges, elapsed %d ms. v.out=%d", i, now - begin,
            ((Set<?>) v.field("out")).size());
        begin = System.currentTimeMillis();
      }
    }
    long now = System.currentTimeMillis();
    System.out.printf("\nInsertion completed in %dms. DB edges %d, DB vertices %d", now - insertBegin,
        database.countEdges(), database.countVertexes());

    // FIX: restart the segment timer so the first "Read ..." report measures
    // read time only, not the tail of the insertion phase plus the printf above.
    begin = System.currentTimeMillis();
    int i = 1;
    for (OIdentifiable e : database.getOutEdges(v)) {
      Assert.assertEquals(database.getInVertex(e).field("id"), i);
      if (i % 100000 == 0) {
        now = System.currentTimeMillis();
        System.out.printf("\nRead %d edges and %d vertices, elapsed %d ms", i, i, now - begin);
        begin = System.currentTimeMillis();
      }
      i++;
    }
    database.declareIntent(null);
  }
}
tests/src/test/java/com/orientechnologies/orient/test/database/speed/GraphDatabaseSuperNodeSpeedTest.java
package com.orientechnologies.orient.test.database.speed;

import java.util.Set;

import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import com.orientechnologies.orient.core.db.graph.OGraphDatabase;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.record.impl.ODocument;

/**
 * Speed test that builds a "super node": one vertex connected by MAX outgoing
 * edges, then reads all edges back and verifies the target vertex ids.
 * Progress and timing are reported every 100k operations.
 */
public class GraphDatabaseSuperNodeSpeedTest {
  private static final String DEFAULT_DB_URL = "local:database/graphtest";
  private static final String DEFAULT_DB_USER = "admin";
  private static final String DEFAULT_DB_PASSWORD = "admin";
  private static final int MAX = 1000000;

  private OGraphDatabase database;

  @BeforeClass
  public void setUpClass() {
    // Recreate the database from scratch so timings are not skewed by leftover data.
    database = new OGraphDatabase(DEFAULT_DB_URL);
    if (database.exists())
      database.delete();
    database.create();
    database.close();
    database.open(DEFAULT_DB_USER, DEFAULT_DB_PASSWORD);
  }

  @AfterClass
  public void tearDownClass() {
    database.close();
  }

  @Test
  public void saveEdges() {
    database.declareIntent(new OIntentMassiveInsert());

    ODocument v = database.createVertex();
    v.field("name", "superNode");

    // insertBegin tracks the whole insertion phase; begin tracks each 100k segment.
    long insertBegin = System.currentTimeMillis();
    long begin = insertBegin;
    for (int i = 1; i <= MAX; ++i) {
      database.createEdge(v, database.createVertex().field("id", i)).save();
      if (i % 100000 == 0) {
        final long now = System.currentTimeMillis();
        System.out.printf("\nInserted %d edges, elapsed %d ms. v.out=%d", i, now - begin,
            ((Set<?>) v.field("out")).size());
        begin = System.currentTimeMillis();
      }
    }
    database.declareIntent(null);

    long now = System.currentTimeMillis();
    // FIX: report the full insertion time (insertBegin) instead of only the
    // last 100k segment; insertBegin was previously captured but never used.
    System.out.printf("\nInsertion completed in %dms. DB edges %d, DB vertices %d", now - insertBegin,
        database.countEdges(), database.countVertexes());

    int i = 1;
    for (OIdentifiable e : database.getOutEdges(v)) {
      Assert.assertEquals(database.getInVertex(e).field("id"), i);
      if (i % 100000 == 0) {
        now = System.currentTimeMillis();
        System.out.printf("\nRead %d edges and %d vertices, elapsed %d ms", i, i, now - begin);
        begin = System.currentTimeMillis();
      }
      i++;
    }
  }
}
Updated test git-svn-id: 9ddf022f45b579842a47abc018ed2b18cdc52108@4806 3625ad7b-9c83-922f-a72b-73d79161f2ea
tests/src/test/java/com/orientechnologies/orient/test/database/speed/GraphDatabaseSuperNodeSpeedTest.java
Updated test
Java
apache-2.0
253e3f1b220d006b9c16ebb64ef1bb08d5961176
0
YoungDigitalPlanet/empiria.player,YoungDigitalPlanet/empiria.player,YoungDigitalPlanet/empiria.player
package eu.ydp.empiria.player.client.controller;

import java.util.ArrayList;
import java.util.List;
import java.util.Vector;

import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONValue;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.xml.client.Element;

import eu.ydp.empiria.player.client.controller.body.BodyGenerator;
import eu.ydp.empiria.player.client.controller.body.InlineBodyGenerator;
import eu.ydp.empiria.player.client.controller.body.InlineBodyGeneratorSocket;
import eu.ydp.empiria.player.client.controller.body.ModuleEventsListener;
import eu.ydp.empiria.player.client.controller.body.ModulesInstalator;
import eu.ydp.empiria.player.client.controller.communication.DisplayContentOptions;
import eu.ydp.empiria.player.client.controller.events.interaction.FeedbackInteractionSoundEvent;
import eu.ydp.empiria.player.client.controller.events.interaction.InteractionEventsListener;
import eu.ydp.empiria.player.client.controller.events.interaction.MediaInteractionSoundEvent;
import eu.ydp.empiria.player.client.controller.events.interaction.MediaInteractionSoundEventCallback;
import eu.ydp.empiria.player.client.controller.events.interaction.StateChangedInteractionEvent;
import eu.ydp.empiria.player.client.controller.events.widgets.WidgetWorkflowListener;
import eu.ydp.empiria.player.client.controller.variables.objects.response.Response;
import eu.ydp.empiria.player.client.module.IActivity;
import eu.ydp.empiria.player.client.module.IInteractionModule;
import eu.ydp.empiria.player.client.module.ILifecycleModule;
import eu.ydp.empiria.player.client.module.ILockable;
import eu.ydp.empiria.player.client.module.IModule;
import eu.ydp.empiria.player.client.module.IResetable;
import eu.ydp.empiria.player.client.module.IStateful;
import eu.ydp.empiria.player.client.module.IUniqueModule;
import eu.ydp.empiria.player.client.module.ModuleSocket;
import eu.ydp.empiria.player.client.module.container.ItemBodyModule;
import eu.ydp.empiria.player.client.module.registry.ModulesRegistrySocket;
import eu.ydp.empiria.player.client.util.js.JSArrayUtils;

/**
 * Container for all modules of a single item body. Installs the modules from
 * the item's XML, relays their interaction events to the player, and manages
 * the item-wide activity state (mark/show answers, lock, reset) and the
 * serialized JSON state of every stateful module.
 */
public class ItemBody implements IActivity, IStateful, WidgetWorkflowListener {

	public List<IModule> modules;

	protected ModuleEventsListener moduleEventsListener;
	protected DisplayContentOptions options;
	protected ModuleSocket moduleSocket;
	protected ModulesRegistrySocket modulesRegistrySocket;

	// State delivered via setState() before the body was attached; applied in onLoad().
	private JSONArray stateAsync;
	private boolean attached = false;
	private boolean locked = false;
	private boolean markingAnswers = false;
	private boolean showingAnswers = false;

	// private Label traceLabel;

	public ItemBody(DisplayContentOptions options, ModuleSocket moduleSocket,
			final InteractionEventsListener interactionEventsListener, ModulesRegistrySocket modulesRegistrySocket) {

		this.moduleSocket = moduleSocket;
		this.options = options;
		this.modulesRegistrySocket = modulesRegistrySocket;

		// Adapts per-module callbacks into player-level interaction events.
		moduleEventsListener = new ModuleEventsListener() {

			@Override
			public void onStateChanged(boolean procesFeedback, IUniqueModule sender) {
				interactionEventsListener.onStateChanged(new StateChangedInteractionEvent(procesFeedback, sender));
			}

			@Override
			public void onFeedbackSoundPlay(String url) {
				interactionEventsListener.onFeedbackSound(new FeedbackInteractionSoundEvent(url));
			}

			@Override
			public void onMediaSoundPlay(String url, MediaInteractionSoundEventCallback callback) {
				interactionEventsListener.onMediaSound(new MediaInteractionSoundEvent(url, callback));
			}
		};

		// traceLabel = new Label();
		// dom.appendChild(traceLabel.getElement());
	}

	/**
	 * Builds the body widget from the item XML, installs all modules and marks
	 * the responses of installed unique modules as present.
	 */
	public Widget init(Element itemBodyElement) {

		ModulesInstalator modulesInstalator = new ModulesInstalator(modulesRegistrySocket, moduleSocket, moduleEventsListener);
		BodyGenerator generator = new BodyGenerator(modulesInstalator, options);

		ItemBodyModule itemBodyModule = new ItemBodyModule();
		itemBodyModule.initModule(itemBodyElement, moduleSocket, generator);
		modules = modulesInstalator.installMultiViewUniqueModules();

		for (IModule currModule : modules) {
			if (currModule instanceof IUniqueModule) {
				Response currResponse = moduleSocket.getResponse(((IUniqueModule) currModule).getIdentifier());
				if (currResponse != null)
					currResponse.setModuleAdded();
			}
		}

		return itemBodyModule.getView();
	}

	// ------------------------- EVENTS --------------------------------

	public void onLoad() {
		for (IModule currModule : modules) {
			if (currModule instanceof ILifecycleModule)
				((ILifecycleModule) currModule).onBodyLoad();
		}

		attached = true;
		// Apply any state that arrived before the body was attached.
		setState(stateAsync);

		if (locked)
			markAnswers(true);
	}

	@Override
	public void onUnload() {
		for (IModule currModule : modules) {
			if (currModule instanceof ILifecycleModule)
				((ILifecycleModule) currModule).onBodyUnload();
		}
	}

	public int getModuleCount() {
		return modules.size();
	}

	/** Marking and showing answers are mutually exclusive; showing is cleared first. */
	@Override
	public void markAnswers(boolean mark) {
		if (showingAnswers)
			showCorrectAnswers(false);
		markingAnswers = mark;
		for (IModule currModule : modules) {
			if (currModule instanceof IActivity)
				((IActivity) currModule).markAnswers(mark);
		}
	}

	/** Marking and showing answers are mutually exclusive; marking is cleared first. */
	@Override
	public void showCorrectAnswers(boolean show) {
		if (markingAnswers)
			markAnswers(false);
		showingAnswers = show;
		for (IModule currModule : modules) {
			if (currModule instanceof IActivity)
				((IActivity) currModule).showCorrectAnswers(show);
		}
	}

	/** Clears marking/showing/lock flags and resets every resetable module. */
	@Override
	public void reset() {
		if (showingAnswers)
			showCorrectAnswers(false);
		if (markingAnswers)
			markAnswers(false);
		if (locked)
			lock(false);
		for (IModule currModule : modules) {
			if (currModule instanceof IResetable)
				((IResetable) currModule).reset();
		}
	}

	@Override
	public void lock(boolean l) {
		locked = l;
		for (IModule currModule : modules) {
			if (currModule instanceof ILockable)
				((ILockable) currModule).lock(l);
		}
	}

	public boolean isLocked() {
		return locked;
	}

	/**
	 * Serializes the state of every stateful unique module into a one-element
	 * JSON array holding an object keyed by module identifier.
	 */
	@Override
	public JSONArray getState() {

		JSONObject states = new JSONObject();

		for (IModule currModule : modules) {
			if (currModule instanceof IStateful && currModule instanceof IUniqueModule)
				states.put(((IUniqueModule) currModule).getIdentifier(), ((IStateful) currModule).getState());
		}

		JSONArray statesArr = new JSONArray();
		statesArr.set(0, states);
		return statesArr;
	}

	/**
	 * Restores module states from the format produced by {@link #getState()}.
	 * If the body is not yet attached the state is stashed and applied in onLoad().
	 */
	@Override
	public void setState(JSONArray newState) {
		if (!attached) {
			stateAsync = newState;
		} else {
			// instanceof doubles as a null check here.
			if (newState instanceof JSONArray) {
				try {
					if (newState.isArray() != null && newState.isArray().size() > 0) {
						JSONObject stateObj = newState.isArray().get(0).isObject();
						for (int i = 0; i < modules.size(); i++) {
							if (modules.get(i) instanceof IStateful && modules.get(i) instanceof IUniqueModule) {
								String curridentifier = ((IUniqueModule) modules.get(i)).getIdentifier();
								// FIX: compare String content, not references;
								// the original used `curridentifier != ""`.
								if (curridentifier != null && !curridentifier.isEmpty()) {
									if (stateObj.containsKey(curridentifier)) {
										JSONValue currState = stateObj.get(curridentifier);
										if (currState != null && currState.isArray() != null)
											((IStateful) modules.get(i)).setState(currState.isArray());
									}
								}
							}
						}
					}
				} catch (Exception e) {
					// Malformed state is ignored; the trace aids debugging in GWT dev mode.
					e.printStackTrace();
				}
			}
		}
	}

	public JavaScriptObject getJsSocket() {
		return createJsSocket();
	}

	// JSNI bridge exposing getModuleSockets() to JavaScript.
	// NOTE(review): the method reference below was reconstructed from a
	// redaction-mangled source ("[email protected]::..."); confirm the qualified
	// class name matches this file's package.
	private native JavaScriptObject createJsSocket()/*-{
		var socket = {};
		var instance = this;
		socket.getModuleSockets = function(){
			return [email protected]::getModuleJsSockets()();
		}
		return socket;
	}-*/;

	private JavaScriptObject getModuleJsSockets() {
		eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket[] moduleSockets = getModuleSockets();
		JsArray<JavaScriptObject> moduleSocketsJs = JSArrayUtils.createArray(0);
		for (int i = 0; i < moduleSockets.length; i++) {
			moduleSocketsJs.set(i, moduleSockets[i].getJsSocket());
		}
		return moduleSocketsJs;
	}

	public eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket[] getModuleSockets() {
		List<eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket> moduleSockets =
				new ArrayList<eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket>();
		for (IModule currModule : modules) {
			if (currModule instanceof eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket) {
				moduleSockets.add(((eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket) currModule));
			}
		}
		return moduleSockets.toArray(new eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket[0]);
	}
}
empiria.player/src/eu/ydp/empiria/player/client/controller/ItemBody.java
package eu.ydp.empiria.player.client.controller; import java.util.ArrayList; import java.util.List; import java.util.Vector; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.JsArray; import com.google.gwt.json.client.JSONArray; import com.google.gwt.json.client.JSONObject; import com.google.gwt.json.client.JSONValue; import com.google.gwt.user.client.ui.Widget; import com.google.gwt.xml.client.Element; import eu.ydp.empiria.player.client.controller.body.BodyGenerator; import eu.ydp.empiria.player.client.controller.body.InlineBodyGenerator; import eu.ydp.empiria.player.client.controller.body.InlineBodyGeneratorSocket; import eu.ydp.empiria.player.client.controller.body.ModuleEventsListener; import eu.ydp.empiria.player.client.controller.body.ModulesInstalator; import eu.ydp.empiria.player.client.controller.communication.DisplayContentOptions; import eu.ydp.empiria.player.client.controller.events.interaction.FeedbackInteractionSoundEvent; import eu.ydp.empiria.player.client.controller.events.interaction.InteractionEventsListener; import eu.ydp.empiria.player.client.controller.events.interaction.MediaInteractionSoundEvent; import eu.ydp.empiria.player.client.controller.events.interaction.MediaInteractionSoundEventCallback; import eu.ydp.empiria.player.client.controller.events.interaction.StateChangedInteractionEvent; import eu.ydp.empiria.player.client.controller.events.widgets.WidgetWorkflowListener; import eu.ydp.empiria.player.client.controller.variables.objects.response.Response; import eu.ydp.empiria.player.client.module.IActivity; import eu.ydp.empiria.player.client.module.IInteractionModule; import eu.ydp.empiria.player.client.module.ILifecycleModule; import eu.ydp.empiria.player.client.module.IModule; import eu.ydp.empiria.player.client.module.IStateful; import eu.ydp.empiria.player.client.module.IUniqueModule; import eu.ydp.empiria.player.client.module.ModuleSocket; import 
eu.ydp.empiria.player.client.module.container.ItemBodyModule; import eu.ydp.empiria.player.client.module.registry.ModulesRegistrySocket; import eu.ydp.empiria.player.client.util.js.JSArrayUtils; public class ItemBody implements IActivity, IStateful, WidgetWorkflowListener { public List<IModule> modules; protected ModuleEventsListener moduleEventsListener; protected DisplayContentOptions options; protected ModuleSocket moduleSocket; protected ModulesRegistrySocket modulesRegistrySocket; private JSONArray stateAsync; private boolean attached = false; private boolean locked = false; private boolean markingAnswers = false; private boolean showingAnswers = false; // private Label traceLabel; public ItemBody(DisplayContentOptions options, ModuleSocket moduleSocket, final InteractionEventsListener interactionEventsListener, ModulesRegistrySocket modulesRegistrySocket) { this.moduleSocket = moduleSocket; this.options = options; this.modulesRegistrySocket = modulesRegistrySocket; moduleEventsListener = new ModuleEventsListener() { @Override public void onStateChanged(boolean procesFeedback, IUniqueModule sender) { interactionEventsListener .onStateChanged(new StateChangedInteractionEvent( procesFeedback, sender)); } @Override public void onFeedbackSoundPlay(String url) { interactionEventsListener .onFeedbackSound(new FeedbackInteractionSoundEvent(url)); } @Override public void onMediaSoundPlay(String url, MediaInteractionSoundEventCallback callback) { interactionEventsListener.onMediaSound(new MediaInteractionSoundEvent(url, callback)); } }; // traceLabel = new Label(); // dom.appendChild(traceLabel.getElement()); } public Widget init(Element itemBodyElement) { ModulesInstalator modulesInstalator = new ModulesInstalator(modulesRegistrySocket, moduleSocket, moduleEventsListener); BodyGenerator generator = new BodyGenerator(modulesInstalator, options); ItemBodyModule itemBodyModule = new ItemBodyModule(); itemBodyModule.initModule(itemBodyElement, moduleSocket, generator); 
modules = modulesInstalator.installMultiViewUniqueModules(); for (IModule currModule : modules) { if (currModule instanceof IUniqueModule){ Response currResponse = moduleSocket.getResponse( ((IUniqueModule) currModule).getIdentifier() ); if (currResponse != null) currResponse.setModuleAdded(); } } return itemBodyModule.getView(); } // ------------------------- EVENTS -------------------------------- public void onLoad() { for (IModule currModule : modules) { if (currModule instanceof ILifecycleModule) ((ILifecycleModule) currModule).onBodyLoad(); } attached = true; setState(stateAsync); if (locked) markAnswers(true); } @Override public void onUnload() { for (IModule currModule : modules) { if (currModule instanceof ILifecycleModule) ((ILifecycleModule) currModule).onBodyUnload(); } } public int getModuleCount() { return modules.size(); } @Override public void markAnswers(boolean mark) { if (showingAnswers) showCorrectAnswers(false); markingAnswers = mark; for (IModule currModule : modules) { if (currModule instanceof IActivity) ((IActivity) currModule).markAnswers(mark); } } @Override public void showCorrectAnswers(boolean show) { if (markingAnswers) markAnswers(false); showingAnswers = show; for (IModule currModule : modules) { if (currModule instanceof IActivity) ((IActivity) currModule).showCorrectAnswers(show); } } @Override public void reset() { if (showingAnswers) showCorrectAnswers(false); if (markingAnswers) markAnswers(false); if (locked) lock(false); for (IModule currModule : modules) { if (currModule instanceof IActivity) ((IActivity) currModule).reset(); } } @Override public void lock(boolean l) { locked = l; for (IModule currModule : modules) { if (currModule instanceof IActivity) ((IActivity) currModule).lock(l); } } public boolean isLocked() { return locked; } @Override public JSONArray getState() { JSONObject states = new JSONObject(); for (IModule currModule : modules) { if (currModule instanceof IStateful && currModule instanceof IUniqueModule) 
states.put(((IUniqueModule) currModule).getIdentifier(), ((IStateful) currModule).getState()); } JSONArray statesArr = new JSONArray(); statesArr.set(0, states); return statesArr; } @Override public void setState(JSONArray newState) { if (!attached) { stateAsync = newState; } else { if (newState instanceof JSONArray) { try { if (newState.isArray() != null && newState.isArray().size() > 0){ JSONObject stateObj = newState.isArray().get(0).isObject(); for (int i = 0; i < modules.size(); i++) { if (modules.get(i) instanceof IStateful && modules.get(i) instanceof IUniqueModule) { String curridentifier = ((IUniqueModule) modules .get(i)).getIdentifier(); if (curridentifier != null && curridentifier != "") { if (stateObj.containsKey(curridentifier)) { JSONValue currState = stateObj .get(curridentifier); if (currState != null && currState.isArray() != null) ((IStateful) modules.get(i)) .setState(currState.isArray()); } } } } } } catch (Exception e) { e.printStackTrace(); } } } } public JavaScriptObject getJsSocket() { return createJsSocket(); } private native JavaScriptObject createJsSocket()/*-{ var socket = {}; var instance = this; socket.getModuleSockets = function(){ return [email protected]::getModuleJsSockets()(); } return socket; }-*/; private JavaScriptObject getModuleJsSockets() { eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket[] moduleSockets = getModuleSockets(); JsArray<JavaScriptObject> moduleSocketsJs = JSArrayUtils.createArray(0); for (int i = 0; i < moduleSockets.length; i++) { moduleSocketsJs.set(i, moduleSockets[i].getJsSocket()); } return moduleSocketsJs; } public eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket[] getModuleSockets() { List<eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket> moduleSockets = new ArrayList<eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket>(); for (IModule currModule : modules) 
{ if (currModule instanceof eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket) { moduleSockets .add(((eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket) currModule)); } } return moduleSockets .toArray(new eu.ydp.empiria.player.client.controller.communication.sockets.ModuleInterferenceSocket[0]); } }
ILockable and IResetable interfaces in ItemBody task#55706 git-svn-id: fd5e0bdc55a948b8bf0a1d738c6140e6e4400378@94436 3e83ccf1-c6ce-0310-9054-8fdb33cf0640
empiria.player/src/eu/ydp/empiria/player/client/controller/ItemBody.java
ILockable and IResetable interfaces in ItemBody task#55706
Java
apache-2.0
262c708b91dd3de1f892a7c4d198b3cafc319b02
0
chaoyi66/commons-lang,vanta/commons-lang,jacktan1991/commons-lang,Ajeet-Ganga/commons-lang,arbasha/commons-lang,britter/commons-lang,weston100721/commons-lang,MuShiiii/commons-lang,suntengteng/commons-lang,PascalSchumacher/commons-lang,lovecindy/commons-lang,Ajeet-Ganga/commons-lang,MarkDacek/commons-lang,britter/commons-lang,lovecindy/commons-lang,jankill/commons-lang,PascalSchumacher/commons-lang,lovecindy/commons-lang,MuShiiii/commons-lang,suntengteng/commons-lang,jacktan1991/commons-lang,xuerenlv/commons-lang,Ajeet-Ganga/commons-lang,mohanaraosv/commons-lang,xiwc/commons-lang,jankill/commons-lang,xuerenlv/commons-lang,mohanaraosv/commons-lang,xiwc/commons-lang,xiwc/commons-lang,apache/commons-lang,arbasha/commons-lang,byMan/naya279,byMan/naya279,chaoyi66/commons-lang,hollycroxton/commons-lang,MarkDacek/commons-lang,vanta/commons-lang,byMan/naya279,longestname1/commonslang,MuShiiii/commons-lang,jankill/commons-lang,weston100721/commons-lang,britter/commons-lang,mohanaraosv/commons-lang,suntengteng/commons-lang,longestname1/commonslang,xuerenlv/commons-lang,apache/commons-lang,vanta/commons-lang,hollycroxton/commons-lang,hollycroxton/commons-lang,jacktan1991/commons-lang,chaoyi66/commons-lang,arbasha/commons-lang,PascalSchumacher/commons-lang,MarkDacek/commons-lang,apache/commons-lang,weston100721/commons-lang,longestname1/commonslang
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3.time; import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; import java.util.Calendar; import java.util.TimeZone; import junit.framework.TestCase; /** * TestCase for DurationFormatUtils. 
* */ public class DurationFormatUtilsTest extends TestCase { public DurationFormatUtilsTest(String s) { super(s); } // ----------------------------------------------------------------------- public void testConstructor() { assertNotNull(new DurationFormatUtils()); Constructor<?>[] cons = DurationFormatUtils.class.getDeclaredConstructors(); assertEquals(1, cons.length); assertEquals(true, Modifier.isPublic(cons[0].getModifiers())); assertEquals(true, Modifier.isPublic(DurationFormatUtils.class.getModifiers())); assertEquals(false, Modifier.isFinal(DurationFormatUtils.class.getModifiers())); } // ----------------------------------------------------------------------- public void testFormatDurationWords() { String text = null; text = DurationFormatUtils.formatDurationWords(50 * 1000, true, false); assertEquals("50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, true, false); assertEquals("1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, true, false); assertEquals("2 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, true, false); assertEquals("2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, true, false); assertEquals("1 hour 12 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000, true, false); assertEquals("1 day 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(50 * 1000, true, true); assertEquals("50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, true, true); assertEquals("1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, true, true); assertEquals("2 minutes", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, true, true); assertEquals("2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, true, true); assertEquals("1 hour 12 minutes", 
text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000, true, true); assertEquals("1 day", text); text = DurationFormatUtils.formatDurationWords(50 * 1000, false, true); assertEquals("0 days 0 hours 0 minutes 50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, false, true); assertEquals("0 days 0 hours 1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, false, true); assertEquals("0 days 0 hours 2 minutes", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, false, true); assertEquals("0 days 0 hours 2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, false, true); assertEquals("0 days 1 hour 12 minutes", text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000, false, true); assertEquals("1 day", text); text = DurationFormatUtils.formatDurationWords(50 * 1000, false, false); assertEquals("0 days 0 hours 0 minutes 50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, false, false); assertEquals("0 days 0 hours 1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, false, false); assertEquals("0 days 0 hours 2 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, false, false); assertEquals("0 days 0 hours 2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, false, false); assertEquals("0 days 1 hour 12 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000 + 72 * 60 * 1000, false, false); assertEquals("1 day 1 hour 12 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(2 * 24 * 60 * 60 * 1000 + 72 * 60 * 1000, false, false); assertEquals("2 days 1 hour 12 minutes 0 seconds", text); for (int i = 2; i < 31; i++) { text = DurationFormatUtils.formatDurationWords(i * 24 * 60 * 60 * 1000L, false, false); // assertEquals(i + " days 0 hours 0 
minutes 0 seconds", text); // // junit.framework.ComparisonFailure: expected:<25 days 0 hours 0 minutes 0...> but was:<-24 days -17 hours // -2 minutes -47...> // at junit.framework.Assert.assertEquals(Assert.java:81) // at junit.framework.Assert.assertEquals(Assert.java:87) // at // org.apache.commons.lang.time.DurationFormatUtilsTest.testFormatDurationWords(DurationFormatUtilsTest.java:124) // at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) // at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39) // at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25) // at java.lang.reflect.Method.invoke(Method.java:324) // at junit.framework.TestCase.runTest(TestCase.java:154) // at junit.framework.TestCase.runBare(TestCase.java:127) // at junit.framework.TestResult$1.protect(TestResult.java:106) // at junit.framework.TestResult.runProtected(TestResult.java:124) // at junit.framework.TestResult.run(TestResult.java:109) // at junit.framework.TestCase.run(TestCase.java:118) // at junit.framework.TestSuite.runTest(TestSuite.java:208) // at junit.framework.TestSuite.run(TestSuite.java:203) // at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:478) // at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:344) // at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:196) } } /** * Tests that "1 <unit>s" gets converted to "1 <unit>" but that "11 <unit>s" is left alone. 
*/ public void testFormatDurationPluralWords() { long oneSecond = 1000; long oneMinute = oneSecond * 60; long oneHour = oneMinute * 60; long oneDay = oneHour * 24; String text = null; text = DurationFormatUtils.formatDurationWords(oneSecond, false, false); assertEquals("0 days 0 hours 0 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(oneSecond * 2, false, false); assertEquals("0 days 0 hours 0 minutes 2 seconds", text); text = DurationFormatUtils.formatDurationWords(oneSecond * 11, false, false); assertEquals("0 days 0 hours 0 minutes 11 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute, false, false); assertEquals("0 days 0 hours 1 minute 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute * 2, false, false); assertEquals("0 days 0 hours 2 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute * 11, false, false); assertEquals("0 days 0 hours 11 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute + oneSecond, false, false); assertEquals("0 days 0 hours 1 minute 1 second", text); text = DurationFormatUtils.formatDurationWords(oneHour, false, false); assertEquals("0 days 1 hour 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneHour * 2, false, false); assertEquals("0 days 2 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneHour * 11, false, false); assertEquals("0 days 11 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneHour + oneMinute + oneSecond, false, false); assertEquals("0 days 1 hour 1 minute 1 second", text); text = DurationFormatUtils.formatDurationWords(oneDay, false, false); assertEquals("1 day 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneDay * 2, false, false); assertEquals("2 days 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneDay * 11, 
false, false); assertEquals("11 days 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneDay + oneHour + oneMinute + oneSecond, false, false); assertEquals("1 day 1 hour 1 minute 1 second", text); } public void testFormatDurationHMS() { long time = 0; assertEquals("0:00:00.000", DurationFormatUtils.formatDurationHMS(time)); time = 1; assertEquals("0:00:00.001", DurationFormatUtils.formatDurationHMS(time)); time = 15; assertEquals("0:00:00.015", DurationFormatUtils.formatDurationHMS(time)); time = 165; assertEquals("0:00:00.165", DurationFormatUtils.formatDurationHMS(time)); time = 1675; assertEquals("0:00:01.675", DurationFormatUtils.formatDurationHMS(time)); time = 13465; assertEquals("0:00:13.465", DurationFormatUtils.formatDurationHMS(time)); time = 72789; assertEquals("0:01:12.789", DurationFormatUtils.formatDurationHMS(time)); time = 12789 + 32 * 60000; assertEquals("0:32:12.789", DurationFormatUtils.formatDurationHMS(time)); time = 12789 + 62 * 60000; assertEquals("1:02:12.789", DurationFormatUtils.formatDurationHMS(time)); } public void testFormatDurationISO() { assertEquals("P0Y0M0DT0H0M0.000S", DurationFormatUtils.formatDurationISO(0L)); assertEquals("P0Y0M0DT0H0M0.001S", DurationFormatUtils.formatDurationISO(1L)); assertEquals("P0Y0M0DT0H0M0.010S", DurationFormatUtils.formatDurationISO(10L)); assertEquals("P0Y0M0DT0H0M0.100S", DurationFormatUtils.formatDurationISO(100L)); assertEquals("P0Y0M0DT0H1M15.321S", DurationFormatUtils.formatDurationISO(75321L)); } public void testFormatDuration() { long duration = 0; assertEquals("0", DurationFormatUtils.formatDuration(duration, "y")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "M")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "d")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "H")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "m")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "s")); 
assertEquals("0", DurationFormatUtils.formatDuration(duration, "S")); assertEquals("0000", DurationFormatUtils.formatDuration(duration, "SSSS")); assertEquals("0000", DurationFormatUtils.formatDuration(duration, "yyyy")); assertEquals("0000", DurationFormatUtils.formatDuration(duration, "yyMM")); duration = 60 * 1000; assertEquals("0", DurationFormatUtils.formatDuration(duration, "y")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "M")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "d")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "H")); assertEquals("1", DurationFormatUtils.formatDuration(duration, "m")); assertEquals("60", DurationFormatUtils.formatDuration(duration, "s")); assertEquals("60000", DurationFormatUtils.formatDuration(duration, "S")); assertEquals("01:00", DurationFormatUtils.formatDuration(duration, "mm:ss")); Calendar base = Calendar.getInstance(); base.set(2000, 0, 1, 0, 0, 0); base.set(Calendar.MILLISECOND, 0); Calendar cal = Calendar.getInstance(); cal.set(2003, 1, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); duration = cal.getTime().getTime() - base.getTime().getTime(); // duration from 2000-01-01 to cal // don't use 1970 in test as time zones were less reliable in 1970 than now // remember that duration formatting ignores time zones, working on strict hour lengths int days = 366 + 365 + 365 + 31; assertEquals("0 0 " + days, DurationFormatUtils.formatDuration(duration, "y M d")); } public void testFormatPeriodISO() { TimeZone timeZone = TimeZone.getTimeZone("GMT-3"); Calendar base = Calendar.getInstance(timeZone); base.set(1970, 0, 1, 0, 0, 0); base.set(Calendar.MILLISECOND, 0); Calendar cal = Calendar.getInstance(timeZone); cal.set(2002, 1, 23, 9, 11, 12); cal.set(Calendar.MILLISECOND, 1); String text; // repeat a test from testDateTimeISO to compare extended and not extended. 
text = DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.format(cal); assertEquals("2002-02-23T09:11:12-03:00", text); // test fixture is the same as above, but now with extended format. text = DurationFormatUtils.formatPeriod(base.getTime().getTime(), cal.getTime().getTime(), DurationFormatUtils.ISO_EXTENDED_FORMAT_PATTERN, false, timeZone); assertEquals("P32Y1M22DT9H11M12.001S", text); // test fixture from example in http://www.w3.org/TR/xmlschema-2/#duration cal.set(1971, 1, 3, 10, 30, 0); cal.set(Calendar.MILLISECOND, 0); text = DurationFormatUtils.formatPeriod(base.getTime().getTime(), cal.getTime().getTime(), DurationFormatUtils.ISO_EXTENDED_FORMAT_PATTERN, false, timeZone); assertEquals("P1Y1M2DT10H30M0.000S", text); // want a way to say 'don't print the seconds in format()' or other fields for that matter: // assertEquals("P1Y2M3DT10H30M", text); } public void testFormatPeriod() { Calendar cal1970 = Calendar.getInstance(); cal1970.set(1970, 0, 1, 0, 0, 0); cal1970.set(Calendar.MILLISECOND, 0); long time1970 = cal1970.getTime().getTime(); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "y")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "M")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "d")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "H")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "m")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "s")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "S")); assertEquals("0000", DurationFormatUtils.formatPeriod(time1970, time1970, "SSSS")); assertEquals("0000", DurationFormatUtils.formatPeriod(time1970, time1970, "yyyy")); assertEquals("0000", DurationFormatUtils.formatPeriod(time1970, time1970, "yyMM")); long time = time1970 + 60 * 1000; assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time, "y")); assertEquals("0", 
DurationFormatUtils.formatPeriod(time1970, time, "M")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time, "d")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time, "H")); assertEquals("1", DurationFormatUtils.formatPeriod(time1970, time, "m")); assertEquals("60", DurationFormatUtils.formatPeriod(time1970, time, "s")); assertEquals("60000", DurationFormatUtils.formatPeriod(time1970, time, "S")); assertEquals("01:00", DurationFormatUtils.formatPeriod(time1970, time, "mm:ss")); Calendar cal = Calendar.getInstance(); cal.set(1973, 6, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); time = cal.getTime().getTime(); assertEquals("36", DurationFormatUtils.formatPeriod(time1970, time, "yM")); assertEquals("3 years 6 months", DurationFormatUtils.formatPeriod(time1970, time, "y' years 'M' months'")); assertEquals("03/06", DurationFormatUtils.formatPeriod(time1970, time, "yy/MM")); cal.set(1973, 10, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); time = cal.getTime().getTime(); assertEquals("310", DurationFormatUtils.formatPeriod(time1970, time, "yM")); assertEquals("3 years 10 months", DurationFormatUtils.formatPeriod(time1970, time, "y' years 'M' months'")); assertEquals("03/10", DurationFormatUtils.formatPeriod(time1970, time, "yy/MM")); cal.set(1974, 0, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); time = cal.getTime().getTime(); assertEquals("40", DurationFormatUtils.formatPeriod(time1970, time, "yM")); assertEquals("4 years 0 months", DurationFormatUtils.formatPeriod(time1970, time, "y' years 'M' months'")); assertEquals("04/00", DurationFormatUtils.formatPeriod(time1970, time, "yy/MM")); assertEquals("48", DurationFormatUtils.formatPeriod(time1970, time, "M")); assertEquals("48", DurationFormatUtils.formatPeriod(time1970, time, "MM")); assertEquals("048", DurationFormatUtils.formatPeriod(time1970, time, "MMM")); } public void testLexx() { // tests each constant assertArrayEquals(new DurationFormatUtils.Token[]{ new 
DurationFormatUtils.Token(DurationFormatUtils.y, 1), new DurationFormatUtils.Token(DurationFormatUtils.M, 1), new DurationFormatUtils.Token(DurationFormatUtils.d, 1), new DurationFormatUtils.Token(DurationFormatUtils.H, 1), new DurationFormatUtils.Token(DurationFormatUtils.m, 1), new DurationFormatUtils.Token(DurationFormatUtils.s, 1), new DurationFormatUtils.Token(DurationFormatUtils.S, 1)}, DurationFormatUtils.lexx("yMdHmsS")); // tests the ISO8601-like assertArrayEquals(new DurationFormatUtils.Token[]{ new DurationFormatUtils.Token(DurationFormatUtils.H, 1), new DurationFormatUtils.Token(new StringBuffer(":"), 1), new DurationFormatUtils.Token(DurationFormatUtils.m, 2), new DurationFormatUtils.Token(new StringBuffer(":"), 1), new DurationFormatUtils.Token(DurationFormatUtils.s, 2), new DurationFormatUtils.Token(new StringBuffer("."), 1), new DurationFormatUtils.Token(DurationFormatUtils.S, 3)}, DurationFormatUtils.lexx("H:mm:ss.SSS")); // test the iso extended format assertArrayEquals(new DurationFormatUtils.Token[]{ new DurationFormatUtils.Token(new StringBuffer("P"), 1), new DurationFormatUtils.Token(DurationFormatUtils.y, 4), new DurationFormatUtils.Token(new StringBuffer("Y"), 1), new DurationFormatUtils.Token(DurationFormatUtils.M, 1), new DurationFormatUtils.Token(new StringBuffer("M"), 1), new DurationFormatUtils.Token(DurationFormatUtils.d, 1), new DurationFormatUtils.Token(new StringBuffer("DT"), 1), new DurationFormatUtils.Token(DurationFormatUtils.H, 1), new DurationFormatUtils.Token(new StringBuffer("H"), 1), new DurationFormatUtils.Token(DurationFormatUtils.m, 1), new DurationFormatUtils.Token(new StringBuffer("M"), 1), new DurationFormatUtils.Token(DurationFormatUtils.s, 1), new DurationFormatUtils.Token(new StringBuffer("."), 1), new DurationFormatUtils.Token(DurationFormatUtils.S, 1), new DurationFormatUtils.Token(new StringBuffer("S"), 1)}, DurationFormatUtils .lexx(DurationFormatUtils.ISO_EXTENDED_FORMAT_PATTERN)); // test failures in equals 
DurationFormatUtils.Token token = new DurationFormatUtils.Token(DurationFormatUtils.y, 4); assertFalse("Token equal to non-Token class. ", token.equals(new Object())); assertFalse("Token equal to Token with wrong value class. ", token.equals(new DurationFormatUtils.Token( new Object()))); assertFalse("Token equal to Token with different count. ", token.equals(new DurationFormatUtils.Token( DurationFormatUtils.y, 1))); DurationFormatUtils.Token numToken = new DurationFormatUtils.Token(new Integer(1), 4); assertTrue("Token with Number value not equal to itself. ", numToken.equals(numToken)); } // http://issues.apache.org/bugzilla/show_bug.cgi?id=38401 public void testBugzilla38401() { assertEqualDuration( "0000/00/30 16:00:00 000", new int[] { 2006, 0, 26, 18, 47, 34 }, new int[] { 2006, 1, 26, 10, 47, 34 }, "yyyy/MM/dd HH:mm:ss SSS"); } // https://issues.apache.org/jira/browse/LANG-281 public void testJiraLang281() { assertEqualDuration( "09", new int[] { 2005, 11, 31, 0, 0, 0 }, new int[] { 2006, 9, 6, 0, 0, 0 }, "MM"); } // Testing the under a day range in DurationFormatUtils.formatPeriod public void testLowDurations() { for(int hr=0; hr < 24; hr++) { for(int min=0; min < 60; min++) { for(int sec=0; sec < 60; sec++) { assertEqualDuration( hr + ":" + min + ":" + sec, new int[] { 2000, 0, 1, 0, 0, 0, 0 }, new int[] { 2000, 0, 1, hr, min, sec }, "H:m:s" ); } } } } // Attempting to test edge cases in DurationFormatUtils.formatPeriod public void testEdgeDurations() { assertEqualDuration( "01", new int[] { 2006, 0, 15, 0, 0, 0 }, new int[] { 2006, 2, 10, 0, 0, 0 }, "MM"); assertEqualDuration( "12", new int[] { 2005, 0, 15, 0, 0, 0 }, new int[] { 2006, 0, 15, 0, 0, 0 }, "MM"); assertEqualDuration( "12", new int[] { 2005, 0, 15, 0, 0, 0 }, new int[] { 2006, 0, 16, 0, 0, 0 }, "MM"); assertEqualDuration( "11", new int[] { 2005, 0, 15, 0, 0, 0 }, new int[] { 2006, 0, 14, 0, 0, 0 }, "MM"); assertEqualDuration( "01 26", new int[] { 2006, 0, 15, 0, 0, 0 }, new int[] { 2006, 2, 
10, 0, 0, 0 }, "MM dd"); assertEqualDuration( "54", new int[] { 2006, 0, 15, 0, 0, 0 }, new int[] { 2006, 2, 10, 0, 0, 0 }, "dd"); assertEqualDuration( "09 12", new int[] { 2006, 1, 20, 0, 0, 0 }, new int[] { 2006, 11, 4, 0, 0, 0 }, "MM dd"); assertEqualDuration( "287", new int[] { 2006, 1, 20, 0, 0, 0 }, new int[] { 2006, 11, 4, 0, 0, 0 }, "dd"); assertEqualDuration( "11 30", new int[] { 2006, 0, 2, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "MM dd"); assertEqualDuration( "364", new int[] { 2006, 0, 2, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "12 00", new int[] { 2006, 0, 1, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "MM dd"); assertEqualDuration( "365", new int[] { 2006, 0, 1, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "31", new int[] { 2006, 0, 1, 0, 0, 0 }, new int[] { 2006, 1, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "92", new int[] { 2005, 9, 1, 0, 0, 0 }, new int[] { 2006, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "77", new int[] { 2005, 9, 16, 0, 0, 0 }, new int[] { 2006, 0, 1, 0, 0, 0 }, "dd"); // test month larger in start than end assertEqualDuration( "136", new int[] { 2005, 9, 16, 0, 0, 0 }, new int[] { 2006, 2, 1, 0, 0, 0 }, "dd"); // test when start in leap year assertEqualDuration( "136", new int[] { 2004, 9, 16, 0, 0, 0 }, new int[] { 2005, 2, 1, 0, 0, 0 }, "dd"); // test when end in leap year assertEqualDuration( "137", new int[] { 2003, 9, 16, 0, 0, 0 }, new int[] { 2004, 2, 1, 0, 0, 0 }, "dd"); // test when end in leap year but less than end of feb assertEqualDuration( "135", new int[] { 2003, 9, 16, 0, 0, 0 }, new int[] { 2004, 1, 28, 0, 0, 0 }, "dd"); assertEqualDuration( "364", new int[] { 2007, 0, 2, 0, 0, 0 }, new int[] { 2008, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "729", new int[] { 2006, 0, 2, 0, 0, 0 }, new int[] { 2008, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "365", new int[] { 2007, 2, 2, 0, 0, 0 }, new int[] { 2008, 2, 1, 0, 0, 0 }, "dd"); 
assertEqualDuration( "333", new int[] { 2007, 1, 2, 0, 0, 0 }, new int[] { 2008, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "28", new int[] { 2008, 1, 2, 0, 0, 0 }, new int[] { 2008, 2, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "393", new int[] { 2007, 1, 2, 0, 0, 0 }, new int[] { 2008, 2, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "369", new int[] { 2004, 0, 29, 0, 0, 0 }, new int[] { 2005, 1, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "338", new int[] { 2004, 1, 29, 0, 0, 0 }, new int[] { 2005, 1, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "28", new int[] { 2004, 2, 8, 0, 0, 0 }, new int[] { 2004, 3, 5, 0, 0, 0 }, "dd"); assertEqualDuration( "48", new int[] { 1992, 1, 29, 0, 0, 0 }, new int[] { 1996, 1, 29, 0, 0, 0 }, "M"); // this seems odd - and will fail if I throw it in as a brute force // below as it expects the answer to be 12. It's a tricky edge case assertEqualDuration( "11", new int[] { 1996, 1, 29, 0, 0, 0 }, new int[] { 1997, 1, 28, 0, 0, 0 }, "M"); // again - this seems odd assertEqualDuration( "11 28", new int[] { 1996, 1, 29, 0, 0, 0 }, new int[] { 1997, 1, 28, 0, 0, 0 }, "M d"); } public void testDurationsByBruteForce() { bruteForce(2006, 0, 1, "d", Calendar.DAY_OF_MONTH); bruteForce(2006, 0, 2, "d", Calendar.DAY_OF_MONTH); bruteForce(2007, 1, 2, "d", Calendar.DAY_OF_MONTH); bruteForce(2004, 1, 29, "d", Calendar.DAY_OF_MONTH); bruteForce(1996, 1, 29, "d", Calendar.DAY_OF_MONTH); bruteForce(1969, 1, 28, "M", Calendar.MONTH); // tests for 48 years //bruteForce(1996, 1, 29, "M", Calendar.MONTH); // this will fail } private static final int FOUR_YEARS = 365 * 3 + 366; // Takes a minute to run, so generally turned off // public void testBrutally() { // Calendar c = Calendar.getInstance(); // c.set(2004, 0, 1, 0, 0, 0); // for (int i=0; i < FOUR_YEARS; i++) { // bruteForce(c.get(Calendar.YEAR), c.get(Calendar.MONTH), c.get(Calendar.DAY_OF_MONTH), "d", Calendar.DAY_OF_MONTH ); // c.add(Calendar.DAY_OF_MONTH, 1); // } // } private void bruteForce(int year, 
int month, int day, String format, int calendarType) { String msg = year + "-" + month + "-" + day + " to "; Calendar c = Calendar.getInstance(); c.set(year, month, day, 0, 0, 0); int[] array1 = new int[] { year, month, day, 0, 0, 0 }; int[] array2 = new int[] { year, month, day, 0, 0, 0 }; for (int i=0; i < FOUR_YEARS; i++) { array2[0] = c.get(Calendar.YEAR); array2[1] = c.get(Calendar.MONTH); array2[2] = c.get(Calendar.DAY_OF_MONTH); String tmpMsg = msg + array2[0] + "-" + array2[1] + "-" + array2[2] + " at "; assertEqualDuration( tmpMsg + i, Integer.toString(i), array1, array2, format ); c.add(calendarType, 1); } } private void assertEqualDuration(String expected, int[] start, int[] end, String format) { assertEqualDuration(null, expected, start, end, format); } private void assertEqualDuration(String message, String expected, int[] start, int[] end, String format) { Calendar cal1 = Calendar.getInstance(); cal1.set(start[0], start[1], start[2], start[3], start[4], start[5]); cal1.set(Calendar.MILLISECOND, 0); Calendar cal2 = Calendar.getInstance(); cal2.set(end[0], end[1], end[2], end[3], end[4], end[5]); cal2.set(Calendar.MILLISECOND, 0); long milli1 = cal1.getTime().getTime(); long milli2 = cal2.getTime().getTime(); String result = DurationFormatUtils.formatPeriod(milli1, milli2, format); if (message == null) { assertEquals(expected, result); } else { assertEquals(message, expected, result); } } private void assertArrayEquals(DurationFormatUtils.Token[] obj1, DurationFormatUtils.Token[] obj2) { assertEquals("Arrays are unequal length. ", obj1.length, obj2.length); for (int i = 0; i < obj1.length; i++) { assertTrue("Index " + i + " not equal, " + obj1[i] + " vs " + obj2[i], obj1[i].equals(obj2[i])); } } }
src/test/java/org/apache/commons/lang3/time/DurationFormatUtilsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3.time; import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; import java.util.Calendar; import java.util.TimeZone; import junit.framework.TestCase; /** * TestCase for DurationFormatUtils. 
* */ public class DurationFormatUtilsTest extends TestCase { public DurationFormatUtilsTest(String s) { super(s); } // ----------------------------------------------------------------------- public void testConstructor() { assertNotNull(new DurationFormatUtils()); Constructor<?>[] cons = DurationFormatUtils.class.getDeclaredConstructors(); assertEquals(1, cons.length); assertEquals(true, Modifier.isPublic(cons[0].getModifiers())); assertEquals(true, Modifier.isPublic(DurationFormatUtils.class.getModifiers())); assertEquals(false, Modifier.isFinal(DurationFormatUtils.class.getModifiers())); } // ----------------------------------------------------------------------- public void testFormatDurationWords() { String text = null; text = DurationFormatUtils.formatDurationWords(50 * 1000, true, false); assertEquals("50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, true, false); assertEquals("1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, true, false); assertEquals("2 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, true, false); assertEquals("2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, true, false); assertEquals("1 hour 12 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000, true, false); assertEquals("1 day 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(50 * 1000, true, true); assertEquals("50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, true, true); assertEquals("1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, true, true); assertEquals("2 minutes", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, true, true); assertEquals("2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, true, true); assertEquals("1 hour 12 minutes", 
text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000, true, true); assertEquals("1 day", text); text = DurationFormatUtils.formatDurationWords(50 * 1000, false, true); assertEquals("0 days 0 hours 0 minutes 50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, false, true); assertEquals("0 days 0 hours 1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, false, true); assertEquals("0 days 0 hours 2 minutes", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, false, true); assertEquals("0 days 0 hours 2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, false, true); assertEquals("0 days 1 hour 12 minutes", text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000, false, true); assertEquals("1 day", text); text = DurationFormatUtils.formatDurationWords(50 * 1000, false, false); assertEquals("0 days 0 hours 0 minutes 50 seconds", text); text = DurationFormatUtils.formatDurationWords(65 * 1000, false, false); assertEquals("0 days 0 hours 1 minute 5 seconds", text); text = DurationFormatUtils.formatDurationWords(120 * 1000, false, false); assertEquals("0 days 0 hours 2 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(121 * 1000, false, false); assertEquals("0 days 0 hours 2 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(72 * 60 * 1000, false, false); assertEquals("0 days 1 hour 12 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(24 * 60 * 60 * 1000 + 72 * 60 * 1000, false, false); assertEquals("1 day 1 hour 12 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(2 * 24 * 60 * 60 * 1000 + 72 * 60 * 1000, false, false); assertEquals("2 days 1 hour 12 minutes 0 seconds", text); for (int i = 2; i < 31; i++) { text = DurationFormatUtils.formatDurationWords(i * 24 * 60 * 60 * 1000L, false, false); // assertEquals(i + " days 0 hours 0 
minutes 0 seconds", text); // // junit.framework.ComparisonFailure: expected:<25 days 0 hours 0 minutes 0...> but was:<-24 days -17 hours // -2 minutes -47...> // at junit.framework.Assert.assertEquals(Assert.java:81) // at junit.framework.Assert.assertEquals(Assert.java:87) // at // org.apache.commons.lang.time.DurationFormatUtilsTest.testFormatDurationWords(DurationFormatUtilsTest.java:124) // at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) // at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39) // at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25) // at java.lang.reflect.Method.invoke(Method.java:324) // at junit.framework.TestCase.runTest(TestCase.java:154) // at junit.framework.TestCase.runBare(TestCase.java:127) // at junit.framework.TestResult$1.protect(TestResult.java:106) // at junit.framework.TestResult.runProtected(TestResult.java:124) // at junit.framework.TestResult.run(TestResult.java:109) // at junit.framework.TestCase.run(TestCase.java:118) // at junit.framework.TestSuite.runTest(TestSuite.java:208) // at junit.framework.TestSuite.run(TestSuite.java:203) // at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:478) // at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:344) // at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:196) } } /** * Tests that "1 <unit>s" gets converted to "1 <unit>" but that "11 <unit>s" is left alone. 
*/ public void testFormatDurationPluralWords() { long oneSecond = 1000; long oneMinute = oneSecond * 60; long oneHour = oneMinute * 60; long oneDay = oneHour * 24; String text = null; text = DurationFormatUtils.formatDurationWords(oneSecond, false, false); assertEquals("0 days 0 hours 0 minutes 1 second", text); text = DurationFormatUtils.formatDurationWords(oneSecond * 2, false, false); assertEquals("0 days 0 hours 0 minutes 2 seconds", text); text = DurationFormatUtils.formatDurationWords(oneSecond * 11, false, false); assertEquals("0 days 0 hours 0 minutes 11 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute, false, false); assertEquals("0 days 0 hours 1 minute 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute * 2, false, false); assertEquals("0 days 0 hours 2 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute * 11, false, false); assertEquals("0 days 0 hours 11 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneMinute + oneSecond, false, false); assertEquals("0 days 0 hours 1 minute 1 second", text); text = DurationFormatUtils.formatDurationWords(oneHour, false, false); assertEquals("0 days 1 hour 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneHour * 2, false, false); assertEquals("0 days 2 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneHour * 11, false, false); assertEquals("0 days 11 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneHour + oneMinute + oneSecond, false, false); assertEquals("0 days 1 hour 1 minute 1 second", text); text = DurationFormatUtils.formatDurationWords(oneDay, false, false); assertEquals("1 day 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneDay * 2, false, false); assertEquals("2 days 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneDay * 11, 
false, false); assertEquals("11 days 0 hours 0 minutes 0 seconds", text); text = DurationFormatUtils.formatDurationWords(oneDay + oneHour + oneMinute + oneSecond, false, false); assertEquals("1 day 1 hour 1 minute 1 second", text); } public void testFormatDurationHMS() { long time = 0; assertEquals("0:00:00.000", DurationFormatUtils.formatDurationHMS(time)); time = 1; assertEquals("0:00:00.001", DurationFormatUtils.formatDurationHMS(time)); time = 15; assertEquals("0:00:00.015", DurationFormatUtils.formatDurationHMS(time)); time = 165; assertEquals("0:00:00.165", DurationFormatUtils.formatDurationHMS(time)); time = 1675; assertEquals("0:00:01.675", DurationFormatUtils.formatDurationHMS(time)); time = 13465; assertEquals("0:00:13.465", DurationFormatUtils.formatDurationHMS(time)); time = 72789; assertEquals("0:01:12.789", DurationFormatUtils.formatDurationHMS(time)); time = 12789 + 32 * 60000; assertEquals("0:32:12.789", DurationFormatUtils.formatDurationHMS(time)); time = 12789 + 62 * 60000; assertEquals("1:02:12.789", DurationFormatUtils.formatDurationHMS(time)); } public void testFormatDurationISO() { assertEquals("P0Y0M0DT0H0M0.000S", DurationFormatUtils.formatDurationISO(0L)); assertEquals("P0Y0M0DT0H0M0.001S", DurationFormatUtils.formatDurationISO(1L)); assertEquals("P0Y0M0DT0H0M0.010S", DurationFormatUtils.formatDurationISO(10L)); assertEquals("P0Y0M0DT0H0M0.100S", DurationFormatUtils.formatDurationISO(100L)); assertEquals("P0Y0M0DT0H1M15.321S", DurationFormatUtils.formatDurationISO(75321L)); } public void testFormatDuration() { long duration = 0; assertEquals("0", DurationFormatUtils.formatDuration(duration, "y")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "M")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "d")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "H")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "m")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "s")); 
assertEquals("0", DurationFormatUtils.formatDuration(duration, "S")); assertEquals("0000", DurationFormatUtils.formatDuration(duration, "SSSS")); assertEquals("0000", DurationFormatUtils.formatDuration(duration, "yyyy")); assertEquals("0000", DurationFormatUtils.formatDuration(duration, "yyMM")); duration = 60 * 1000; assertEquals("0", DurationFormatUtils.formatDuration(duration, "y")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "M")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "d")); assertEquals("0", DurationFormatUtils.formatDuration(duration, "H")); assertEquals("1", DurationFormatUtils.formatDuration(duration, "m")); assertEquals("60", DurationFormatUtils.formatDuration(duration, "s")); assertEquals("60000", DurationFormatUtils.formatDuration(duration, "S")); assertEquals("01:00", DurationFormatUtils.formatDuration(duration, "mm:ss")); Calendar base = Calendar.getInstance(); base.set(2000, 0, 1, 0, 0, 0); base.set(Calendar.MILLISECOND, 0); Calendar cal = Calendar.getInstance(); cal.set(2003, 1, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); duration = cal.getTime().getTime() - base.getTime().getTime(); // duration from 2000-01-01 to cal // don't use 1970 in test as time zones were less reliable in 1970 than now // remember that duration formatting ignores time zones, working on strict hour lengths int days = 366 + 365 + 365 + 31; assertEquals("0 0 " + days, DurationFormatUtils.formatDuration(duration, "y M d")); } public void testFormatPeriodISO() { TimeZone timeZone = TimeZone.getTimeZone("GMT-3"); Calendar base = Calendar.getInstance(timeZone); base.set(1970, 0, 1, 0, 0, 0); base.set(Calendar.MILLISECOND, 0); Calendar cal = Calendar.getInstance(timeZone); cal.set(2002, 1, 23, 9, 11, 12); cal.set(Calendar.MILLISECOND, 1); String text; // repeat a test from testDateTimeISO to compare extended and not extended. 
text = DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.format(cal); assertEquals("2002-02-23T09:11:12-03:00", text); // test fixture is the same as above, but now with extended format. text = DurationFormatUtils.formatPeriod(base.getTime().getTime(), cal.getTime().getTime(), DurationFormatUtils.ISO_EXTENDED_FORMAT_PATTERN, false, timeZone); assertEquals("P32Y1M22DT9H11M12.001S", text); // test fixture from example in http://www.w3.org/TR/xmlschema-2/#duration cal.set(1971, 1, 3, 10, 30, 0); cal.set(Calendar.MILLISECOND, 0); text = DurationFormatUtils.formatPeriod(base.getTime().getTime(), cal.getTime().getTime(), DurationFormatUtils.ISO_EXTENDED_FORMAT_PATTERN, false, timeZone); assertEquals("P1Y1M2DT10H30M0.000S", text); // want a way to say 'don't print the seconds in format()' or other fields for that matter: // assertEquals("P1Y2M3DT10H30M", text); } public void testFormatPeriod() { Calendar cal1970 = Calendar.getInstance(); cal1970.set(1970, 0, 1, 0, 0, 0); cal1970.set(Calendar.MILLISECOND, 0); long time1970 = cal1970.getTime().getTime(); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "y")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "M")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "d")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "H")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "m")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "s")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time1970, "S")); assertEquals("0000", DurationFormatUtils.formatPeriod(time1970, time1970, "SSSS")); assertEquals("0000", DurationFormatUtils.formatPeriod(time1970, time1970, "yyyy")); assertEquals("0000", DurationFormatUtils.formatPeriod(time1970, time1970, "yyMM")); long time = time1970 + 60 * 1000; assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time, "y")); assertEquals("0", 
DurationFormatUtils.formatPeriod(time1970, time, "M")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time, "d")); assertEquals("0", DurationFormatUtils.formatPeriod(time1970, time, "H")); assertEquals("1", DurationFormatUtils.formatPeriod(time1970, time, "m")); assertEquals("60", DurationFormatUtils.formatPeriod(time1970, time, "s")); assertEquals("60000", DurationFormatUtils.formatPeriod(time1970, time, "S")); assertEquals("01:00", DurationFormatUtils.formatPeriod(time1970, time, "mm:ss")); Calendar cal = Calendar.getInstance(); cal.set(1973, 6, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); time = cal.getTime().getTime(); assertEquals("36", DurationFormatUtils.formatPeriod(time1970, time, "yM")); assertEquals("3 years 6 months", DurationFormatUtils.formatPeriod(time1970, time, "y' years 'M' months'")); assertEquals("03/06", DurationFormatUtils.formatPeriod(time1970, time, "yy/MM")); cal.set(1973, 10, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); time = cal.getTime().getTime(); assertEquals("310", DurationFormatUtils.formatPeriod(time1970, time, "yM")); assertEquals("3 years 10 months", DurationFormatUtils.formatPeriod(time1970, time, "y' years 'M' months'")); assertEquals("03/10", DurationFormatUtils.formatPeriod(time1970, time, "yy/MM")); cal.set(1974, 0, 1, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); time = cal.getTime().getTime(); assertEquals("40", DurationFormatUtils.formatPeriod(time1970, time, "yM")); assertEquals("4 years 0 months", DurationFormatUtils.formatPeriod(time1970, time, "y' years 'M' months'")); assertEquals("04/00", DurationFormatUtils.formatPeriod(time1970, time, "yy/MM")); assertEquals("48", DurationFormatUtils.formatPeriod(time1970, time, "M")); assertEquals("48", DurationFormatUtils.formatPeriod(time1970, time, "MM")); assertEquals("048", DurationFormatUtils.formatPeriod(time1970, time, "MMM")); } public void testLexx() { // tests each constant assertArrayEquals(new DurationFormatUtils.Token[]{ new 
DurationFormatUtils.Token(DurationFormatUtils.y, 1), new DurationFormatUtils.Token(DurationFormatUtils.M, 1), new DurationFormatUtils.Token(DurationFormatUtils.d, 1), new DurationFormatUtils.Token(DurationFormatUtils.H, 1), new DurationFormatUtils.Token(DurationFormatUtils.m, 1), new DurationFormatUtils.Token(DurationFormatUtils.s, 1), new DurationFormatUtils.Token(DurationFormatUtils.S, 1)}, DurationFormatUtils.lexx("yMdHmsS")); // tests the ISO8601-like assertArrayEquals(new DurationFormatUtils.Token[]{ new DurationFormatUtils.Token(DurationFormatUtils.H, 1), new DurationFormatUtils.Token(new StringBuffer(":"), 1), new DurationFormatUtils.Token(DurationFormatUtils.m, 2), new DurationFormatUtils.Token(new StringBuffer(":"), 1), new DurationFormatUtils.Token(DurationFormatUtils.s, 2), new DurationFormatUtils.Token(new StringBuffer("."), 1), new DurationFormatUtils.Token(DurationFormatUtils.S, 3)}, DurationFormatUtils.lexx("H:mm:ss.SSS")); // test the iso extended format assertArrayEquals(new DurationFormatUtils.Token[]{ new DurationFormatUtils.Token(new StringBuffer("P"), 1), new DurationFormatUtils.Token(DurationFormatUtils.y, 4), new DurationFormatUtils.Token(new StringBuffer("Y"), 1), new DurationFormatUtils.Token(DurationFormatUtils.M, 1), new DurationFormatUtils.Token(new StringBuffer("M"), 1), new DurationFormatUtils.Token(DurationFormatUtils.d, 1), new DurationFormatUtils.Token(new StringBuffer("DT"), 1), new DurationFormatUtils.Token(DurationFormatUtils.H, 1), new DurationFormatUtils.Token(new StringBuffer("H"), 1), new DurationFormatUtils.Token(DurationFormatUtils.m, 1), new DurationFormatUtils.Token(new StringBuffer("M"), 1), new DurationFormatUtils.Token(DurationFormatUtils.s, 1), new DurationFormatUtils.Token(new StringBuffer("."), 1), new DurationFormatUtils.Token(DurationFormatUtils.S, 1), new DurationFormatUtils.Token(new StringBuffer("S"), 1)}, DurationFormatUtils .lexx(DurationFormatUtils.ISO_EXTENDED_FORMAT_PATTERN)); // test failures in equals 
DurationFormatUtils.Token token = new DurationFormatUtils.Token(DurationFormatUtils.y, 4); assertFalse("Token equal to non-Token class. ", token.equals(new Object())); assertFalse("Token equal to Token with wrong value class. ", token.equals(new DurationFormatUtils.Token( new Object()))); assertFalse("Token equal to Token with different count. ", token.equals(new DurationFormatUtils.Token( DurationFormatUtils.y, 1))); DurationFormatUtils.Token numToken = new DurationFormatUtils.Token(new Integer(1), 4); assertTrue("Token with Number value not equal to itself. ", numToken.equals(numToken)); } // http://issues.apache.org/bugzilla/show_bug.cgi?id=38401 public void testBugzilla38401() { assertEqualDuration( "0000/00/30 16:00:00 000", new int[] { 2006, 0, 26, 18, 47, 34 }, new int[] { 2006, 1, 26, 10, 47, 34 }, "yyyy/MM/dd HH:mm:ss SSS"); } // https://issues.apache.org/jira/browse/LANG-281 public void testJiraLang281() { assertEqualDuration( "09", new int[] { 2005, 11, 31, 0, 0, 0 }, new int[] { 2006, 9, 6, 0, 0, 0 }, "MM"); } // Testing the under a day range in DurationFormatUtils.formatPeriod public void testLowDurations() { for(int hr=0; hr < 24; hr++) { for(int min=0; min < 60; min++) { for(int sec=0; sec < 60; sec++) { assertEqualDuration( hr + ":" + min + ":" + sec, new int[] { 2000, 0, 1, 0, 0, 0, 0 }, new int[] { 2000, 0, 1, hr, min, sec }, "H:m:s" ); } } } } // Attempting to test edge cases in DurationFormatUtils.formatPeriod public void testEdgeDurations() { assertEqualDuration( "01", new int[] { 2006, 0, 15, 0, 0, 0 }, new int[] { 2006, 2, 10, 0, 0, 0 }, "MM"); assertEqualDuration( "12", new int[] { 2005, 0, 15, 0, 0, 0 }, new int[] { 2006, 0, 15, 0, 0, 0 }, "MM"); assertEqualDuration( "12", new int[] { 2005, 0, 15, 0, 0, 0 }, new int[] { 2006, 0, 16, 0, 0, 0 }, "MM"); assertEqualDuration( "11", new int[] { 2005, 0, 15, 0, 0, 0 }, new int[] { 2006, 0, 14, 0, 0, 0 }, "MM"); assertEqualDuration( "01 26", new int[] { 2006, 0, 15, 0, 0, 0 }, new int[] { 2006, 2, 
10, 0, 0, 0 }, "MM dd"); assertEqualDuration( "54", new int[] { 2006, 0, 15, 0, 0, 0 }, new int[] { 2006, 2, 10, 0, 0, 0 }, "dd"); assertEqualDuration( "09 12", new int[] { 2006, 1, 20, 0, 0, 0 }, new int[] { 2006, 11, 4, 0, 0, 0 }, "MM dd"); assertEqualDuration( "287", new int[] { 2006, 1, 20, 0, 0, 0 }, new int[] { 2006, 11, 4, 0, 0, 0 }, "dd"); assertEqualDuration( "11 30", new int[] { 2006, 0, 2, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "MM dd"); assertEqualDuration( "364", new int[] { 2006, 0, 2, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "12 00", new int[] { 2006, 0, 1, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "MM dd"); assertEqualDuration( "365", new int[] { 2006, 0, 1, 0, 0, 0 }, new int[] { 2007, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "31", new int[] { 2006, 0, 1, 0, 0, 0 }, new int[] { 2006, 1, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "92", new int[] { 2005, 9, 1, 0, 0, 0 }, new int[] { 2006, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "77", new int[] { 2005, 9, 16, 0, 0, 0 }, new int[] { 2006, 0, 1, 0, 0, 0 }, "dd"); // test month larger in start than end assertEqualDuration( "136", new int[] { 2005, 9, 16, 0, 0, 0 }, new int[] { 2006, 2, 1, 0, 0, 0 }, "dd"); // test when start in leap year assertEqualDuration( "136", new int[] { 2004, 9, 16, 0, 0, 0 }, new int[] { 2005, 2, 1, 0, 0, 0 }, "dd"); // test when end in leap year assertEqualDuration( "137", new int[] { 2003, 9, 16, 0, 0, 0 }, new int[] { 2004, 2, 1, 0, 0, 0 }, "dd"); // test when end in leap year but less than end of feb assertEqualDuration( "135", new int[] { 2003, 9, 16, 0, 0, 0 }, new int[] { 2004, 1, 28, 0, 0, 0 }, "dd"); assertEqualDuration( "364", new int[] { 2007, 0, 2, 0, 0, 0 }, new int[] { 2008, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "729", new int[] { 2006, 0, 2, 0, 0, 0 }, new int[] { 2008, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "365", new int[] { 2007, 2, 2, 0, 0, 0 }, new int[] { 2008, 2, 1, 0, 0, 0 }, "dd"); 
assertEqualDuration( "333", new int[] { 2007, 1, 2, 0, 0, 0 }, new int[] { 2008, 0, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "28", new int[] { 2008, 1, 2, 0, 0, 0 }, new int[] { 2008, 2, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "393", new int[] { 2007, 1, 2, 0, 0, 0 }, new int[] { 2008, 2, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "369", new int[] { 2004, 0, 29, 0, 0, 0 }, new int[] { 2005, 1, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "338", new int[] { 2004, 1, 29, 0, 0, 0 }, new int[] { 2005, 1, 1, 0, 0, 0 }, "dd"); assertEqualDuration( "28", new int[] { 2004, 2, 8, 0, 0, 0 }, new int[] { 2004, 3, 5, 0, 0, 0 }, "dd"); assertEqualDuration( "48", new int[] { 1992, 1, 29, 0, 0, 0 }, new int[] { 1996, 1, 29, 0, 0, 0 }, "M"); // this seems odd - and will fail if I throw it in as a brute force // below as it expects the answer to be 12. It's a tricky edge case assertEqualDuration( "11", new int[] { 1996, 1, 29, 0, 0, 0 }, new int[] { 1997, 1, 28, 0, 0, 0 }, "M"); // again - this seems odd assertEqualDuration( "11 28", new int[] { 1996, 1, 29, 0, 0, 0 }, new int[] { 1997, 1, 28, 0, 0, 0 }, "M d"); } public void testDurationsByBruteForce() { bruteForce(2006, 0, 1, "d", Calendar.DAY_OF_MONTH); bruteForce(2006, 0, 2, "d", Calendar.DAY_OF_MONTH); bruteForce(2007, 1, 2, "d", Calendar.DAY_OF_MONTH); bruteForce(2004, 1, 29, "d", Calendar.DAY_OF_MONTH); bruteForce(1996, 1, 29, "d", Calendar.DAY_OF_MONTH); bruteForce(1969, 1, 28, "M", Calendar.MONTH); // tests for 48 years //bruteForce(1996, 1, 29, "M", Calendar.MONTH); // this will fail } private final int FOUR_YEARS = 365 * 3 + 366; // Takes a minute to run, so generally turned off // public void testBrutally() { // Calendar c = Calendar.getInstance(); // c.set(2004, 0, 1, 0, 0, 0); // for (int i=0; i < FOUR_YEARS; i++) { // bruteForce(c.get(Calendar.YEAR), c.get(Calendar.MONTH), c.get(Calendar.DAY_OF_MONTH), "d", Calendar.DAY_OF_MONTH ); // c.add(Calendar.DAY_OF_MONTH, 1); // } // } private void bruteForce(int year, int 
month, int day, String format, int calendarType) { String msg = year + "-" + month + "-" + day + " to "; Calendar c = Calendar.getInstance(); c.set(year, month, day, 0, 0, 0); int[] array1 = new int[] { year, month, day, 0, 0, 0 }; int[] array2 = new int[] { year, month, day, 0, 0, 0 }; for (int i=0; i < FOUR_YEARS; i++) { array2[0] = c.get(Calendar.YEAR); array2[1] = c.get(Calendar.MONTH); array2[2] = c.get(Calendar.DAY_OF_MONTH); String tmpMsg = msg + array2[0] + "-" + array2[1] + "-" + array2[2] + " at "; assertEqualDuration( tmpMsg + i, Integer.toString(i), array1, array2, format ); c.add(calendarType, 1); } } private void assertEqualDuration(String expected, int[] start, int[] end, String format) { assertEqualDuration(null, expected, start, end, format); } private void assertEqualDuration(String message, String expected, int[] start, int[] end, String format) { Calendar cal1 = Calendar.getInstance(); cal1.set(start[0], start[1], start[2], start[3], start[4], start[5]); cal1.set(Calendar.MILLISECOND, 0); Calendar cal2 = Calendar.getInstance(); cal2.set(end[0], end[1], end[2], end[3], end[4], end[5]); cal2.set(Calendar.MILLISECOND, 0); long milli1 = cal1.getTime().getTime(); long milli2 = cal2.getTime().getTime(); String result = DurationFormatUtils.formatPeriod(milli1, milli2, format); if (message == null) { assertEquals(expected, result); } else { assertEquals(message, expected, result); } } private void assertArrayEquals(DurationFormatUtils.Token[] obj1, DurationFormatUtils.Token[] obj2) { assertEquals("Arrays are unequal length. ", obj1.length, obj2.length); for (int i = 0; i < obj1.length; i++) { assertTrue("Index " + i + " not equal, " + obj1[i] + " vs " + obj2[i], obj1[i].equals(obj2[i])); } } }
Make field static. git-svn-id: bab3daebc4e66440cbcc4aded890e63215874748@1145339 13f79535-47bb-0310-9956-ffa450edef68
src/test/java/org/apache/commons/lang3/time/DurationFormatUtilsTest.java
Make field static.
Java
bsd-3-clause
2d6606b69ecda605419d12997e333a060dec3be0
0
CBIIT/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,NCIP/caaers
package gov.nih.nci.cabig.caaers.web.ae; import gov.nih.nci.cabig.caaers.api.AdeersReportGenerator; import gov.nih.nci.cabig.caaers.api.AdverseEventReportSerializer; import gov.nih.nci.cabig.caaers.dao.ExpeditedAdverseEventReportDao; import gov.nih.nci.cabig.caaers.domain.ExpeditedAdverseEventReport; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.OutputStream; import java.rmi.RemoteException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.validation.BindException; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.AbstractCommandController; public class GenerateExpeditedPdfController extends AbstractCommandController { public GenerateExpeditedPdfController() { setCommandClass(GenerateExpeditedPdfCommand.class); } private void generateOutput(String outFile,HttpServletResponse response,String reportId) throws Exception{ String tempDir = System.getProperty("java.io.tmpdir"); File file = new File(tempDir+File.separator+outFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename="+outFile ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); } @Override protected ModelAndView handle(HttpServletRequest request, HttpServletResponse response, Object arg2, BindException arg3) throws Exception { String tempDir = System.getProperty("java.io.tmpdir"); String reportId = request.getParameter("aeReport"); String format = request.getParameter("format"); try { ExpeditedAdverseEventReportDao expeditedAdverseEventReportDao = 
(ExpeditedAdverseEventReportDao)getApplicationContext().getBean("expeditedAdverseEventReportDao"); ExpeditedAdverseEventReport aeReport = expeditedAdverseEventReportDao.getById(Integer.parseInt(reportId)); AdverseEventReportSerializer ser = new AdverseEventReportSerializer(); String xml = ser.serialize(aeReport); //System.out.print(xml); if (format.equals("pdf")) { String pdfOutFile = "expeditedAdverseEventReport-"+reportId+".pdf"; AdeersReportGenerator gen = new AdeersReportGenerator(); gen.generatePdf(xml,tempDir+File.separator+pdfOutFile); generateOutput(pdfOutFile,response,reportId); /* File file = new File(pdfOutFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename=expeditedAdverseEventReport-"+reportId+".pdf" ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); */ } else if (format.equals("medwatchpdf")) { String pdfOutFile = "MedWatchReport-"+reportId+".pdf"; AdeersReportGenerator gen = new AdeersReportGenerator(); gen.generateMedwatchPdf(xml,tempDir+File.separator+pdfOutFile); generateOutput(pdfOutFile,response,reportId); /* File file = new File(pdfOutFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename=expeditedAdverseEventReport-"+reportId+".pdf" ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); */ } else if (format.equals("dcp")) { String pdfOutFile = "dcp-"+reportId+".pdf"; 
AdeersReportGenerator gen = new AdeersReportGenerator(); gen.generateDcpSaeForm(xml, tempDir+File.separator+pdfOutFile); generateOutput(pdfOutFile,response,reportId); } else { String xmlOutFile = "expeditedAdverseEventReport-"+reportId+".xml"; BufferedWriter outw = new BufferedWriter(new FileWriter(tempDir+File.separator+xmlOutFile)); outw.write(xml); outw.close(); generateOutput(xmlOutFile,response,reportId); /* File file = new File(xmlOutFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename=expeditedAdverseEventReport-"+reportId+".xml" ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); */ } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); throw new RemoteException ("Error generating PDF ",e); } return null; } }
projects/web/src/main/java/gov/nih/nci/cabig/caaers/web/ae/GenerateExpeditedPdfController.java
package gov.nih.nci.cabig.caaers.web.ae; import gov.nih.nci.cabig.caaers.api.AdeersReportGenerator; import gov.nih.nci.cabig.caaers.api.AdverseEventReportSerializer; import gov.nih.nci.cabig.caaers.dao.ExpeditedAdverseEventReportDao; import gov.nih.nci.cabig.caaers.domain.ExpeditedAdverseEventReport; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.OutputStream; import java.rmi.RemoteException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.validation.BindException; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.AbstractCommandController; public class GenerateExpeditedPdfController extends AbstractCommandController { public GenerateExpeditedPdfController() { setCommandClass(GenerateExpeditedPdfCommand.class); } private void generateOutput(String outFile,HttpServletResponse response,String reportId) throws Exception{ String tempDir = System.getProperty("java.io.tmpdir"); File file = new File(tempDir+File.separator+outFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename="+outFile ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); } @Override protected ModelAndView handle(HttpServletRequest request, HttpServletResponse response, Object arg2, BindException arg3) throws Exception { String tempDir = System.getProperty("java.io.tmpdir"); String reportId = request.getParameter("aeReport"); String format = request.getParameter("format"); try { ExpeditedAdverseEventReportDao expeditedAdverseEventReportDao = 
(ExpeditedAdverseEventReportDao)getApplicationContext().getBean("expeditedAdverseEventReportDao"); ExpeditedAdverseEventReport aeReport = expeditedAdverseEventReportDao.getById(Integer.parseInt(reportId)); AdverseEventReportSerializer ser = new AdverseEventReportSerializer(); String xml = ser.serialize(aeReport); //System.out.print(xml); if (format.equals("pdf")) { String pdfOutFile = "expeditedAdverseEventReport-"+reportId+".pdf"; AdeersReportGenerator gen = new AdeersReportGenerator(); gen.genatePdf(xml,tempDir+File.separator+pdfOutFile); generateOutput(pdfOutFile,response,reportId); /* File file = new File(pdfOutFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename=expeditedAdverseEventReport-"+reportId+".pdf" ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); */ } else if (format.equals("medwatchpdf")) { String pdfOutFile = "MedWatchReport-"+reportId+".pdf"; AdeersReportGenerator gen = new AdeersReportGenerator(); gen.genateMedwatchPdf(xml,tempDir+File.separator+pdfOutFile); generateOutput(pdfOutFile,response,reportId); /* File file = new File(pdfOutFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename=expeditedAdverseEventReport-"+reportId+".pdf" ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); */ } else if (format.equals("dcp")) { String pdfOutFile = "dcp-"+reportId+".pdf"; 
AdeersReportGenerator gen = new AdeersReportGenerator(); gen.genateDcpSaeForm(xml, tempDir+File.separator+pdfOutFile); generateOutput(pdfOutFile,response,reportId); } else { String xmlOutFile = "expeditedAdverseEventReport-"+reportId+".xml"; BufferedWriter outw = new BufferedWriter(new FileWriter(tempDir+File.separator+xmlOutFile)); outw.write(xml); outw.close(); generateOutput(xmlOutFile,response,reportId); /* File file = new File(xmlOutFile); FileInputStream fileIn = new FileInputStream(file); OutputStream out = response.getOutputStream(); response.setContentType( "application/x-download" ); response.setHeader( "Content-Disposition", "attachment; filename=expeditedAdverseEventReport-"+reportId+".xml" ); byte[] buffer = new byte[2048]; int bytesRead = fileIn.read(buffer); while (bytesRead >= 0) { if (bytesRead > 0) out.write(buffer, 0, bytesRead); bytesRead = fileIn.read(buffer); } out.flush(); out.close(); fileIn.close(); */ } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); throw new RemoteException ("Error generating PDF ",e); } return null; } }
SVN-Revision: 4132
projects/web/src/main/java/gov/nih/nci/cabig/caaers/web/ae/GenerateExpeditedPdfController.java
Java
mit
9e5ba1ff9af5062e0e0b87c2f95af1c98bf7c90d
0
Exslims/MercuryTrade
package com.mercury.platform.diff;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;

/**
 * Created by Frost on 02.01.2017.
 */
public class DiffChecker {

    /**
     * Returns the names of the entries present in the larger jar but absent
     * from the smaller one.
     *
     * NOTE(review): this is a one-sided difference — entries unique to the
     * smaller jar are not reported. Confirm this is the intended contract.
     *
     * @param first  first jar to compare
     * @param second second jar to compare
     * @return entry names (via {@link ZipEntry#toString()}) of the difference
     */
    public List<String> calculateDifference(JarFile first, JarFile second) {
        List<JarEntry> firstEntries = Collections.list(first.entries());
        List<JarEntry> secondEntries = Collections.list(second.entries());
        firstEntries.sort(new JarEntryComparator());
        secondEntries.sort(new JarEntryComparator());
        // Fix: dropped the unused local JarEntryComparator instance that was
        // constructed here but never referenced.
        List<String> difference;
        if (firstEntries.size() > secondEntries.size()) {
            firstEntries.removeAll(secondEntries);
            difference = firstEntries.stream().map(ZipEntry::toString).collect(Collectors.toList());
        } else {
            secondEntries.removeAll(firstEntries);
            difference = secondEntries.stream().map(ZipEntry::toString).collect(Collectors.toList());
        }
        return difference;
    }
}
app-socket-server/src/main/java/com/mercury/platform/diff/DiffChecker.java
package com.mercury.platform.diff;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;

/**
 * Created by Frost on 02.01.2017.
 */
public class DiffChecker {

    /**
     * Computes a one-sided difference between two jars: the entry names that
     * exist in the larger jar but not in the smaller one.
     */
    public List<String> calculateDifference(JarFile first, JarFile second) {
        List<JarEntry> entriesOfFirst = Collections.list(first.entries());
        List<JarEntry> entriesOfSecond = Collections.list(second.entries());
        entriesOfFirst.sort(new JarEntryComparator());
        entriesOfSecond.sort(new JarEntryComparator());
        // NOTE(review): this comparator instance is never used below.
        JarEntryComparator comparator = new JarEntryComparator();
        List<String> result;
        if (entriesOfFirst.size() > entriesOfSecond.size()) {
            entriesOfFirst.removeAll(entriesOfSecond);
            result = entriesOfFirst.stream().map(ZipEntry::toString).collect(Collectors.toList());
        } else {
            entriesOfSecond.removeAll(entriesOfFirst);
            result = entriesOfSecond.stream().map(ZipEntry::toString).collect(Collectors.toList());
        }
        return result;
    }
}
DiffChecker refactoring
app-socket-server/src/main/java/com/mercury/platform/diff/DiffChecker.java
DiffChecker refactoring
Java
mit
c028ebc111eed38b6cccfc8c67b8a1926796e219
0
wfwalker/jcavern
package jcavern;

import java.util.Observable;

/**
 * Parent class for players, monsters, and trees.
 *
 * @author Bill Walker
 */
public abstract class Thing extends Observable implements Cloneable {

    // The name of this thing.
    private String mName;

    /**
     * Creates a new thing with the given name.
     */
    public Thing(String aName) {
        mName = aName;
    }

    public String toString() {
        return getClass().getName() + "." + mName;
    }

    /**
     * Returns the name of this thing.
     */
    protected String getName() {
        return mName;
    }

    /**
     * Performs one game turn for this thing; the default implementation
     * does nothing.
     */
    public void doTurn(World aWorld) throws JCavernInternalError {
    }

    /**
     * Subclasses must produce a copy of themselves.
     */
    public abstract Object clone();

    /**
     * Returns the one character string that is the appearance of this thing;
     * "?" unless a subclass overrides it.
     */
    public String getAppearance() {
        return "?";
    }
}
sources/jcavern/Thing.java
package jcavern;

import java.util.Observable;

/**
 * Parent class for players, monsters, and trees.
 *
 * @author Bill Walker
 */
public abstract class Thing extends Observable implements Cloneable {

    // The name of this thing.
    private String mName;

    /**
     * Creates a new thing with the given name.
     */
    public Thing(String aName) {
        mName = aName;
    }

    public String toString() {
        return getClass().getName() + "." + mName;
    }

    /**
     * Returns the name of this thing.
     */
    protected String getName() {
        return mName;
    }

    /**
     * Performs one game turn for this thing; the default implementation
     * does nothing.
     */
    public void doTurn(World aWorld) throws NoSuchThingException {
    }

    /**
     * Reports whether this thing takes part in combat; false by default.
     */
    public boolean isCombatant() {
        return false;
    }

    /**
     * Returns the one character string that is the appearance of this thing.
     */
    public abstract String getAppearance();
}
made getAppearance non-abstract; added cloning implementation
sources/jcavern/Thing.java
made getAppearance non-abstract; added cloning implementation
Java
mit
28719462712fdb4bee9fcbf5630f9256c4e02c82
0
mini2Dx/jarn,mini2Dx/jarn,mini2Dx/jarn
/** * MIT License * * Copyright (c) 2017 Thomas Cashman * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.mini2Dx.yarn.operation; import java.util.ArrayList; import java.util.List; /** * Groups several {@link YarnOperation} */ public abstract class YarnOperationGroup extends YarnOperation { protected final List<YarnOperation> operations = new ArrayList<YarnOperation>(2); public YarnOperationGroup(int operationIndex, int lineNumber) { super(operationIndex, lineNumber); } @Override protected <T extends YarnOperation> void getOperationsOfType(Class<T> clazz, List<T> result) { for(YarnOperation operation : operations) { operation.getOperationsOfType(clazz, result); } } public void appendOperation(YarnOperation operation) { operations.add(operation); } public int getTotalOperations() { return operations.size(); } }
jarn/src/main/java/org/mini2Dx/yarn/operation/YarnOperationGroup.java
/** * MIT License * * Copyright (c) 2017 Thomas Cashman * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.mini2Dx.yarn.operation; import java.util.ArrayList; import java.util.List; /** * Groups several {@link YarnOperation} */ public abstract class YarnOperationGroup extends YarnOperation { protected final List<YarnOperation> operations = new ArrayList<YarnOperation>(2); public YarnOperationGroup(int operationIndex, int lineNumber) { super(operationIndex, lineNumber); } @Override public <T extends YarnOperation> void getOperationsOfType(Class<T> clazz, List<T> result) { for(YarnOperation operation : operations) { operation.getOperationsOfType(clazz, result); } } public void appendOperation(YarnOperation operation) { operations.add(operation); } public int getTotalOperations() { return operations.size(); } }
Fixed incorrect method visibility
jarn/src/main/java/org/mini2Dx/yarn/operation/YarnOperationGroup.java
Fixed incorrect method visibility
Java
mit
ad3bc16c6b37609f6631a7fc2c79e48a1db82725
0
typingincolor/galen
package info.losd.galen.repository; import info.losd.galen.repository.dto.Healthcheck; import info.losd.galen.repository.dto.HealthcheckDetails; import info.losd.galen.repository.dto.HealthcheckMean; import info.losd.galen.repository.dto.HealthcheckStatistic; import org.influxdb.InfluxDB; import org.influxdb.dto.Point; import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.concurrent.TimeUnit; /** * The MIT License (MIT) * <p> * Copyright (c) 2015 Andrew Braithwaite * <p> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ @Repository public class InfluxdbHealthcheckRepo implements HealthcheckRepo { @Autowired InfluxDB influxDB; private Logger logger = LoggerFactory.getLogger(InfluxdbHealthcheckRepo.class); @Override public void save(HealthcheckDetails s) { Point point = Point.measurement("statistic") .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) .tag("healthcheck", s.getHealthcheck().getName()) .field("response_time", s.getDuration()) .field("status_code", s.getStatusCode()) .build(); influxDB.write("galen", "default", point); } @Override public List<Healthcheck> getHealthchecks() { Query query = new Query("SHOW TAG VALUES FROM statistic WITH KEY = healthcheck", "galen"); QueryResult apiList = influxDB.query(query); List<Healthcheck> healthchecks = new LinkedList<>(); try { apiList.getResults().get(0).getSeries().get(0).getValues().forEach(value -> healthchecks.add(new Healthcheck((String) value.get(0)))); } catch (Exception e) { logger.info("Returning empty list of health checks: {}", e.getMessage()); return Collections.<Healthcheck>emptyList(); } return healthchecks; } @Override public List<HealthcheckStatistic> getStatisticsForPeriod(String healthcheck, Period period) { String queryString = String.format( "SELECT time, response_time, status_code FROM statistic WHERE time > now() - %s AND healthcheck = '%s'" , period.toString() , healthcheck); Query query = new Query(queryString, "galen"); QueryResult healthcheckLost = influxDB.query(query); List<HealthcheckStatistic> statistics = new LinkedList<>(); try { healthcheckLost.getResults().get(0).getSeries().get(0).getValues().forEach(value -> { statistics.add(new HealthcheckStatistic((String) value.get(0), (int) value.get(1), (int) value.get(2))); }); } catch (Exception e) { logger.info("Returning empty list of statistics: {}", e.getMessage()); return Collections.<HealthcheckStatistic>emptyList(); } return statistics; } @Override public HealthcheckMean getMeanForPeriod(String healthcheck, Period period) throws 
MeanNotCalculatedException { String queryString = String.format( "SELECT mean(response_time) FROM statistic WHERE time > now() - %s AND healthcheck = '%s'" , period.toString() , healthcheck); Query query = new Query(queryString, "galen"); QueryResult healthcheckLost = influxDB.query(query); try { List<Object> value = healthcheckLost.getResults().get(0).getSeries().get(0).getValues().get(0); return new HealthcheckMean((String) value.get(0), (double) value.get(1)); } catch (Exception e) { logger.info("Unable to calculate mean: {}", e.getMessage()); throw new MeanNotCalculatedException(e); } } }
src/main/java/info/losd/galen/repository/InfluxdbHealthcheckRepo.java
package info.losd.galen.repository; import info.losd.galen.repository.dto.Healthcheck; import info.losd.galen.repository.dto.HealthcheckDetails; import info.losd.galen.repository.dto.HealthcheckMean; import info.losd.galen.repository.dto.HealthcheckStatistic; import org.influxdb.InfluxDB; import org.influxdb.dto.Point; import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.concurrent.TimeUnit; /** * The MIT License (MIT) * <p> * Copyright (c) 2015 Andrew Braithwaite * <p> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ @Repository public class InfluxdbHealthcheckRepo implements HealthcheckRepo { @Autowired InfluxDB influxDB; private Logger logger = LoggerFactory.getLogger(InfluxdbHealthcheckRepo.class); @Override public void save(HealthcheckDetails s) { Point point = Point.measurement("statistic") .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) .tag("healthcheck", s.getHealthcheck().getName()) .field("response_time", s.getDuration()) .field("status_code", s.getStatusCode()) .build(); influxDB.write("galen", "default", point); } @Override public List<Healthcheck> getHealthchecks() { Query query = new Query("SHOW TAG VALUES FROM statistic WITH KEY = healthcheck", "galen"); QueryResult apiList = influxDB.query(query); List<Healthcheck> healthchecks = new LinkedList<>(); try { apiList.getResults().get(0).getSeries().get(0).getValues().forEach(value -> healthchecks.add(new Healthcheck((String) value.get(0)))); } catch (Exception e) { logger.info("Returning empty list of health checks {}", e.getMessage()); return Collections.<Healthcheck>emptyList(); } return healthchecks; } @Override public List<HealthcheckStatistic> getStatisticsForPeriod(String healthcheck, Period period) { String queryString = String.format( "SELECT time, response_time, status_code FROM statistic WHERE time > now() - %s AND healthcheck = '%s'" , period.toString() , healthcheck); Query query = new Query(queryString, "galen"); QueryResult healthcheckLost = influxDB.query(query); List<HealthcheckStatistic> statistics = new LinkedList<>(); try { healthcheckLost.getResults().get(0).getSeries().get(0).getValues().forEach(value -> { statistics.add(new HealthcheckStatistic((String) value.get(0), (int) value.get(1), (int) value.get(2))); }); } catch (Exception e) { logger.info("Returning empty list of statistics {}", e.getMessage()); return Collections.<HealthcheckStatistic>emptyList(); } return statistics; } @Override public HealthcheckMean getMeanForPeriod(String healthcheck, Period period) throws 
MeanNotCalculatedException { String queryString = String.format( "SELECT mean(response_time) FROM statistic WHERE time > now() - %s AND healthcheck = '%s'" , period.toString() , healthcheck); Query query = new Query(queryString, "galen"); QueryResult healthcheckLost = influxDB.query(query); try { List<Object> value = healthcheckLost.getResults().get(0).getSeries().get(0).getValues().get(0); return new HealthcheckMean((String) value.get(0), (double) value.get(1)); } catch (Exception e) { throw new MeanNotCalculatedException(e); } } }
better error messages
src/main/java/info/losd/galen/repository/InfluxdbHealthcheckRepo.java
better error messages
Java
epl-1.0
bf995d259b7372282a41d6e538d2fdd5cc9e85d8
0
miklossy/xtext-core,miklossy/xtext-core
/******************************************************************************* * Copyright (c) 2015 itemis AG (http://www.itemis.eu) and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html *******************************************************************************/ package org.eclipse.xtext.formatting2.regionaccess.internal; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.eclipse.emf.ecore.EDataType; import org.eclipse.emf.ecore.EObject; import org.eclipse.xtext.Action; import org.eclipse.xtext.Assignment; import org.eclipse.xtext.CrossReference; import org.eclipse.xtext.GrammarUtil; import org.eclipse.xtext.RuleCall; import org.eclipse.xtext.formatting2.regionaccess.ITextRegionAccess; import org.eclipse.xtext.nodemodel.BidiTreeIterator; import org.eclipse.xtext.nodemodel.ICompositeNode; import org.eclipse.xtext.nodemodel.ILeafNode; import org.eclipse.xtext.nodemodel.INode; import org.eclipse.xtext.resource.XtextResource; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; public class NodeModelBasedRegionAccessBuilder { private Map<EObject, NodeEObjectRegion> eObjToTokens; private NodeHiddenRegion firstHidden; private NodeHiddenRegion lastHidden; private XtextResource resource; private LinkedList<NodeEObjectRegion> stack = new LinkedList<NodeEObjectRegion>(); protected void add(NodeModelBasedRegionAccess access, INode node) { if (node instanceof ILeafNode && ((ILeafNode) node).isHidden()) { ILeafNode leafNode = (ILeafNode) node; lastHidden.addPart(createHidden(lastHidden, leafNode)); } else if (node instanceof ICompositeNode || node.getLength() > 0) { NodeEObjectRegion eObjectTokens = stack.peek(); NodeSemanticRegion newSemantic = createSemanticRegion(access, node); NodeHiddenRegion newHidden = 
createHiddenRegion(access); newSemantic.setTrailingHiddenRegion(newHidden); newHidden.setPrevious(newSemantic); newSemantic.setLeadingHiddenRegion(lastHidden); lastHidden.setNext(newSemantic); eObjectTokens.getSemanticRegions().add(newSemantic); newSemantic.setEObjectTokens(eObjectTokens); lastHidden = newHidden; } } public NodeModelBasedRegionAccess create() { NodeModelBasedRegionAccess access = new NodeModelBasedRegionAccess(this); return access; } protected NodeHidden createHidden(NodeHiddenRegion hidden, ILeafNode node) { if (isComment(node)) return new NodeComment(hidden, node); else return new NodeWhitespace(hidden, node); } protected NodeHiddenRegion createHiddenRegion(ITextRegionAccess access) { return new NodeHiddenRegion(access); } protected NodeSemanticRegion createSemanticRegion(NodeModelBasedRegionAccess access, INode node) { return new NodeSemanticRegion(access, node); } protected NodeEObjectRegion createTokens(NodeModelBasedRegionAccess access, INode node) { return new NodeEObjectRegion(access, node); } protected Map<EObject, AbstractEObjectRegion> getEObjectToTokensMap(ITextRegionAccess regionAccess) { this.eObjToTokens = Maps.newHashMap(); this.firstHidden = createHiddenRegion(regionAccess); this.lastHidden = this.firstHidden; NodeModelBasedRegionAccess access = (NodeModelBasedRegionAccess) regionAccess; ICompositeNode rootNode = resource.getParseResult().getRootNode(); process(rootNode, access); return ImmutableMap.<EObject, AbstractEObjectRegion> copyOf(this.eObjToTokens); } protected XtextResource getXtextResource() { return resource; } protected boolean include(INode node) { if (node instanceof ILeafNode) { return true; } else if (node instanceof ICompositeNode) { EObject element = node.getGrammarElement(); return GrammarUtil.isDatatypeRuleCall(element) || element instanceof CrossReference || GrammarUtil.isEnumRuleCall(element); } return false; } protected boolean isComment(ILeafNode leaf) { String text = leaf.getText(); for (int i = 0; i < 
text.length(); i++) if (!Character.isWhitespace(text.charAt(i))) return true; return false; } protected boolean isEObjectRoot(INode node) { if (node instanceof ICompositeNode) { ICompositeNode parent = node.getParent(); while (parent != null && GrammarUtil.isEObjectFragmentRuleCall(parent.getGrammarElement())) parent = parent.getParent(); if (parent == null) return true; INode root = parent; while (root != null && !root.hasDirectSemanticElement()) root = root.getParent(); if (root == null) return false; EObject element = node.getGrammarElement(); if (GrammarUtil.isEObjectRuleCall(element) && !GrammarUtil.isEObjectFragmentRuleCall(element)) { if (!parent.hasDirectSemanticElement()) return false; BidiTreeIterator<INode> iterator = node.getAsTreeIterable().iterator(); iterator.next(); while (iterator.hasNext()) { INode next = iterator.next(); if (next.hasDirectSemanticElement()) return true; EObject ge = next.getGrammarElement(); if (ge instanceof Action) return true; if (ge instanceof RuleCall && GrammarUtil.isAssigned(ge) && ((RuleCall) ge).getRule().getType().getClassifier() instanceof EDataType) return true; if (ge instanceof CrossReference) return true; } } if (element instanceof Action) { return parent.hasDirectSemanticElement(); } } return false; } protected EObject findGrammarElement(INode node, EObject obj) { INode current = node; String feature = obj.eContainingFeature().getName(); while (current != null) { EObject grammarElement = current.getGrammarElement(); Assignment assignment = GrammarUtil.containingAssignment(grammarElement); if (assignment != null && feature.equals(assignment.getFeature())) return grammarElement; if (grammarElement instanceof Action) { Action action = (Action) grammarElement; if (feature.equals(action.getFeature())) return grammarElement; else if (current == node && current instanceof ICompositeNode) { INode child = ((ICompositeNode) current).getFirstChild(); while (child instanceof ICompositeNode) { EObject grammarElement2 = 
child.getGrammarElement(); Assignment assignment2 = GrammarUtil.containingAssignment(grammarElement2); if (assignment2 != null && feature.equals(assignment2.getFeature())) return grammarElement2; // if (child.hasDirectSemanticElement() && child.getSemanticElement() != obj) // break; child = ((ICompositeNode) child).getFirstChild(); } } } if (current.hasDirectSemanticElement() && current.getSemanticElement() != obj) return null; current = current.getParent(); } return null; } protected void process(INode node, NodeModelBasedRegionAccess access) { NodeEObjectRegion tokens = stack.peek(); boolean creator = isEObjectRoot(node); if (creator || tokens == null) { tokens = new NodeEObjectRegion(access, node); tokens.setLeadingHiddenRegion(lastHidden); stack.push(tokens); } if (tokens.getSemanticElement() == null) { if (node.getParent() == null) { tokens.setSemanticElement(resource.getContents().get(0)); EObject element = node.getGrammarElement(); if (element instanceof Action) element = ((ICompositeNode) node).getFirstChild().getGrammarElement(); tokens.setGrammarElement(element); } else if (node.hasDirectSemanticElement()) { tokens.setSemanticElement(node.getSemanticElement()); tokens.setGrammarElement(findGrammarElement(node, tokens.getSemanticElement())); } } if (include(node)) { if (node instanceof ICompositeNode) { for (ILeafNode leaf : node.getLeafNodes()) if (leaf.isHidden()) this.add(access, leaf); else break; } this.add(access, node); } else if (node instanceof ICompositeNode) { for (INode child : ((ICompositeNode) node).getChildren()) process(child, access); } if (creator) { NodeEObjectRegion popped = stack.pop(); popped.setTrailingHiddenRegion(lastHidden); EObject semanticElement = popped.getSemanticElement(); if (semanticElement == null) throw new IllegalStateException(); if (!stack.isEmpty() && semanticElement.eContainer() != stack.peek().getSemanticElement()) throw new IllegalStateException(); EObject grammarElement = popped.getGrammarElement(); if 
(grammarElement == null) { throw new IllegalStateException(); } NodeEObjectRegion old = eObjToTokens.put(semanticElement, popped); if (old != null) throw new IllegalStateException(); } } public NodeModelBasedRegionAccessBuilder withResource(XtextResource resource) { this.resource = resource; return this; } protected NodeHiddenRegion getFirstHidden() { return firstHidden; } protected void setFirstHidden(NodeHiddenRegion firstHidden) { this.firstHidden = firstHidden; } protected NodeHiddenRegion getLastHidden() { return lastHidden; } protected void setLastHidden(NodeHiddenRegion lastHidden) { this.lastHidden = lastHidden; } protected List<NodeEObjectRegion> getStack() { return stack; } }
org.eclipse.xtext/src/org/eclipse/xtext/formatting2/regionaccess/internal/NodeModelBasedRegionAccessBuilder.java
/******************************************************************************* * Copyright (c) 2015 itemis AG (http://www.itemis.eu) and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html *******************************************************************************/ package org.eclipse.xtext.formatting2.regionaccess.internal; import java.util.LinkedList; import java.util.Map; import org.eclipse.emf.ecore.EDataType; import org.eclipse.emf.ecore.EObject; import org.eclipse.xtext.Action; import org.eclipse.xtext.Assignment; import org.eclipse.xtext.CrossReference; import org.eclipse.xtext.GrammarUtil; import org.eclipse.xtext.RuleCall; import org.eclipse.xtext.formatting2.regionaccess.ITextRegionAccess; import org.eclipse.xtext.nodemodel.BidiTreeIterator; import org.eclipse.xtext.nodemodel.ICompositeNode; import org.eclipse.xtext.nodemodel.ILeafNode; import org.eclipse.xtext.nodemodel.INode; import org.eclipse.xtext.resource.XtextResource; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; public class NodeModelBasedRegionAccessBuilder { private Map<EObject, NodeEObjectRegion> eObjToTokens; private NodeHiddenRegion firstHidden; private NodeHiddenRegion lastHidden; private XtextResource resource; private LinkedList<NodeEObjectRegion> stack = new LinkedList<NodeEObjectRegion>(); protected void add(NodeModelBasedRegionAccess access, INode node) { if (node instanceof ILeafNode && ((ILeafNode) node).isHidden()) { ILeafNode leafNode = (ILeafNode) node; lastHidden.addPart(createHidden(lastHidden, leafNode)); } else if (node instanceof ICompositeNode || node.getLength() > 0) { NodeEObjectRegion eObjectTokens = stack.peek(); NodeSemanticRegion newSemantic = createSemanticRegion(access, node); NodeHiddenRegion newHidden = 
createHiddenRegion(access); newSemantic.setTrailingHiddenRegion(newHidden); newHidden.setPrevious(newSemantic); newSemantic.setLeadingHiddenRegion(lastHidden); lastHidden.setNext(newSemantic); eObjectTokens.getSemanticRegions().add(newSemantic); newSemantic.setEObjectTokens(eObjectTokens); lastHidden = newHidden; } } public NodeModelBasedRegionAccess create() { NodeModelBasedRegionAccess access = new NodeModelBasedRegionAccess(this); return access; } protected NodeHidden createHidden(NodeHiddenRegion hidden, ILeafNode node) { if (isComment(node)) return new NodeComment(hidden, node); else return new NodeWhitespace(hidden, node); } protected NodeHiddenRegion createHiddenRegion(ITextRegionAccess access) { return new NodeHiddenRegion(access); } protected NodeSemanticRegion createSemanticRegion(NodeModelBasedRegionAccess access, INode node) { return new NodeSemanticRegion(access, node); } protected NodeEObjectRegion createTokens(NodeModelBasedRegionAccess access, INode node) { return new NodeEObjectRegion(access, node); } protected Map<EObject, AbstractEObjectRegion> getEObjectToTokensMap(ITextRegionAccess regionAccess) { this.eObjToTokens = Maps.newHashMap(); this.firstHidden = createHiddenRegion(regionAccess); this.lastHidden = this.firstHidden; NodeModelBasedRegionAccess access = (NodeModelBasedRegionAccess) regionAccess; ICompositeNode rootNode = resource.getParseResult().getRootNode(); process(rootNode, access); return ImmutableMap.<EObject, AbstractEObjectRegion> copyOf(this.eObjToTokens); } protected XtextResource getXtextResource() { return resource; } protected boolean include(INode node) { if (node instanceof ILeafNode) { return true; } else if (node instanceof ICompositeNode) { EObject element = node.getGrammarElement(); return GrammarUtil.isDatatypeRuleCall(element) || element instanceof CrossReference || GrammarUtil.isEnumRuleCall(element); } return false; } protected boolean isComment(ILeafNode leaf) { String text = leaf.getText(); for (int i = 0; i < 
text.length(); i++) if (!Character.isWhitespace(text.charAt(i))) return true; return false; } protected boolean isEObjectRoot(INode node) { if (node instanceof ICompositeNode) { ICompositeNode parent = node.getParent(); while (parent != null && GrammarUtil.isEObjectFragmentRuleCall(parent.getGrammarElement())) parent = parent.getParent(); if (parent == null) return true; INode root = parent; while (root != null && !root.hasDirectSemanticElement()) root = root.getParent(); if (root == null) return false; EObject element = node.getGrammarElement(); if (GrammarUtil.isEObjectRuleCall(element) && !GrammarUtil.isEObjectFragmentRuleCall(element)) { if (!parent.hasDirectSemanticElement()) return false; BidiTreeIterator<INode> iterator = node.getAsTreeIterable().iterator(); iterator.next(); while (iterator.hasNext()) { INode next = iterator.next(); if (next.hasDirectSemanticElement()) return true; EObject ge = next.getGrammarElement(); if (ge instanceof Action) return true; if (ge instanceof RuleCall && GrammarUtil.isAssigned(ge) && ((RuleCall) ge).getRule().getType().getClassifier() instanceof EDataType) return true; if (ge instanceof CrossReference) return true; } } if (element instanceof Action) { return parent.hasDirectSemanticElement(); } } return false; } protected EObject findGrammarElement(INode node, EObject obj) { INode current = node; String feature = obj.eContainingFeature().getName(); while (current != null) { EObject grammarElement = current.getGrammarElement(); Assignment assignment = GrammarUtil.containingAssignment(grammarElement); if (assignment != null && feature.equals(assignment.getFeature())) return grammarElement; if (grammarElement instanceof Action) { Action action = (Action) grammarElement; if (feature.equals(action.getFeature())) return grammarElement; else if (current == node && current instanceof ICompositeNode) { INode child = ((ICompositeNode) current).getFirstChild(); while (child instanceof ICompositeNode) { EObject grammarElement2 = 
child.getGrammarElement(); Assignment assignment2 = GrammarUtil.containingAssignment(grammarElement2); if (assignment2 != null && feature.equals(assignment2.getFeature())) return grammarElement2; // if (child.hasDirectSemanticElement() && child.getSemanticElement() != obj) // break; child = ((ICompositeNode) child).getFirstChild(); } } } if (current.hasDirectSemanticElement() && current.getSemanticElement() != obj) return null; current = current.getParent(); } return null; } protected void process(INode node, NodeModelBasedRegionAccess access) { NodeEObjectRegion tokens = stack.peek(); boolean creator = isEObjectRoot(node); if (creator || tokens == null) { tokens = new NodeEObjectRegion(access, node); tokens.setLeadingHiddenRegion(lastHidden); stack.push(tokens); } if (tokens.getSemanticElement() == null) { if (node.getParent() == null) { tokens.setSemanticElement(resource.getContents().get(0)); EObject element = node.getGrammarElement(); if (element instanceof Action) element = ((ICompositeNode) node).getFirstChild().getGrammarElement(); tokens.setGrammarElement(element); } else if (node.hasDirectSemanticElement()) { tokens.setSemanticElement(node.getSemanticElement()); tokens.setGrammarElement(findGrammarElement(node, tokens.getSemanticElement())); } } if (include(node)) { if (node instanceof ICompositeNode) { for (ILeafNode leaf : node.getLeafNodes()) if (leaf.isHidden()) this.add(access, leaf); else break; } this.add(access, node); } else if (node instanceof ICompositeNode) { for (INode child : ((ICompositeNode) node).getChildren()) process(child, access); } if (creator) { NodeEObjectRegion popped = stack.pop(); popped.setTrailingHiddenRegion(lastHidden); EObject semanticElement = popped.getSemanticElement(); if (semanticElement == null) throw new IllegalStateException(); if (!stack.isEmpty() && semanticElement.eContainer() != stack.peek().getSemanticElement()) throw new IllegalStateException(); EObject grammarElement = popped.getGrammarElement(); if 
(grammarElement == null) { throw new IllegalStateException(); } NodeEObjectRegion old = eObjToTokens.put(semanticElement, popped); if (old != null) throw new IllegalStateException(); } } public NodeModelBasedRegionAccessBuilder withResource(XtextResource resource) { this.resource = resource; return this; } protected NodeHiddenRegion getFirstHidden() { return firstHidden; } protected void setFirstHidden(NodeHiddenRegion firstHidden) { this.firstHidden = firstHidden; } protected NodeHiddenRegion getLastHidden() { return lastHidden; } protected void setLastHidden(NodeHiddenRegion lastHidden) { this.lastHidden = lastHidden; } protected List<NodeEObjectRegion> getStack() { return stack; } }
[textRegionAccess] Fixed missing import. Signed-off-by: Alex Tugarev <[email protected]>
org.eclipse.xtext/src/org/eclipse/xtext/formatting2/regionaccess/internal/NodeModelBasedRegionAccessBuilder.java
[textRegionAccess] Fixed missing import.
Java
agpl-3.0
33ddf0d42d8eee15b6140ba9f7c8aeb12f1d380f
0
elki-project/elki,elki-project/elki,elki-project/elki
package de.lmu.ifi.dbs.elki.algorithm.statistics; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; import de.lmu.ifi.dbs.elki.algorithm.DistanceBasedAlgorithm; import de.lmu.ifi.dbs.elki.algorithm.clustering.ByLabelClustering; import de.lmu.ifi.dbs.elki.data.DoubleVector; import de.lmu.ifi.dbs.elki.data.RealVector; import de.lmu.ifi.dbs.elki.data.cluster.Cluster; import de.lmu.ifi.dbs.elki.data.model.Model; import de.lmu.ifi.dbs.elki.database.Database; import de.lmu.ifi.dbs.elki.distance.DoubleDistance; import de.lmu.ifi.dbs.elki.distance.distancefunction.DistanceFunction; import de.lmu.ifi.dbs.elki.math.Histogram; import de.lmu.ifi.dbs.elki.result.CollectionResult; import de.lmu.ifi.dbs.elki.utilities.Description; import de.lmu.ifi.dbs.elki.utilities.Progress; import de.lmu.ifi.dbs.elki.utilities.QueryResult; import de.lmu.ifi.dbs.elki.utilities.pairs.SimplePair; /** * Evaluate a distance function with respect to kNN queries. For each point, the * neighbors are sorted by distance, then the ROC AUC is computed. A score of 1 * means that the distance function provides a perfect ordering of relevant * neighbors first, then irrelevant neighbors. A value of 0.5 can be obtained by * random sorting. A value of 0 means the distance function is inverted, i.e. a * similarity. * * TODO: Make number of bins configurable * * TODO: Add sampling * * @author Erich Schubert */ public class RankingQualityHistogram<V extends RealVector<V, ?>> extends DistanceBasedAlgorithm<V, DoubleDistance, CollectionResult<DoubleVector>> { private CollectionResult<DoubleVector> result; /** * Empty constructor. Nothing to do. */ public RankingQualityHistogram() { super(); } /** * Run the algorithm. 
*/ @Override protected CollectionResult<DoubleVector> runInTime(Database<V> database) throws IllegalStateException { DistanceFunction<V, DoubleDistance> distFunc = getDistanceFunction(); distFunc.setDatabase(database, isVerbose(), isTime()); // local copy, not entirely necessary. I just like control, guaranteed // sequences // and stable+efficient array index -> id lookups. ArrayList<Integer> ids = new ArrayList<Integer>(database.getIDs()); int size = ids.size(); if(isVerbose()) { verbose("Preprocessing clusters..."); } // Cluster by labels ByLabelClustering<V> split = new ByLabelClustering<V>(); Set<Cluster<Model>> splitted = split.run(database).getAllClusters(); Histogram<Double> hist = Histogram.DoubleHistogram(100, 0.0, 1.0); if(isVerbose()) { verbose("Processing points..."); } Progress rocloop = new Progress("ROC computation loop ...", size); int rocproc = 0; // sort neighbors for(Cluster<?> clus : splitted) { for(Integer i1 : clus.getIDs()) { List<QueryResult<DoubleDistance>> knn = database.kNNQueryForID(i1, size, distFunc); double result = computeROCAUC(size, clus, knn); hist.put(result, hist.get(result) + 1./size); if(isVerbose()) { rocproc++; rocloop.setProcessed(rocproc); progress(rocloop); } } } if(isVerbose()) { verbose(""); } // Transform Histogram into a Double Vector array. Collection<DoubleVector> res = new ArrayList<DoubleVector>(size); for (SimplePair<Double, Double> pair : hist) { DoubleVector row = new DoubleVector(new double[] { pair.getFirst(), pair.getSecond() }); res.add(row); } result = new CollectionResult<DoubleVector>(res); return result; } /** * Compute a ROC curves Area-under-curve. * * @param size * @param clus * @param nei * @return area under curve */ // TODO: make static, move into utilities. 
private double computeROCAUC(int size, Cluster<?> clus, List<QueryResult<DoubleDistance>> nei) { int postot = clus.size(); int negtot = size - postot; int poscnt = 0; int negcnt = 0; double lastpos = 0.0; double lastneg = 0.0; double result = 0.0; Collection<Integer> ids = clus.getIDs(); // we need an array since using an integer index is much more convenient // for merging multiple equi-distant neighbors. ArrayList<QueryResult<DoubleDistance>> n = new ArrayList<QueryResult<DoubleDistance>>(nei); // TODO: rewrite to use the original list. We only need 1 lookahead. for (int i = 0; i < n.size(); i++) { QueryResult<DoubleDistance> p = n.get(i); if(ids.contains(p.getID())) { poscnt += 1; } else { negcnt += 1; } // defer calculation if this points distance equals the next points distance. if (i + 1 < n.size()) { if (n.get(i+1).getDistance().compareTo(p.getDistance()) == 0) { continue; } } // since lastfalse and lastpost were last updated double curpos = ((double) poscnt) / postot; double curneg = ((double) negcnt) / negtot; // width * height at half way. result += (curneg - lastneg) * (curpos + lastpos) / 2; lastpos = curpos; lastneg = curneg; } return result; } /** * Describe the algorithm and it's use. */ public Description getDescription() { return new Description("EvaluateRankingQuality", "EvaluateRankingQuality", "Evaluates the effectiveness of a distance function via the obtained rankings.", ""); } /** * Return a result object */ public CollectionResult<DoubleVector> getResult() { return result; } }
src/de/lmu/ifi/dbs/elki/algorithm/statistics/RankingQualityHistogram.java
package de.lmu.ifi.dbs.elki.algorithm.statistics; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; import de.lmu.ifi.dbs.elki.algorithm.DistanceBasedAlgorithm; import de.lmu.ifi.dbs.elki.algorithm.clustering.ByLabelClustering; import de.lmu.ifi.dbs.elki.data.DoubleVector; import de.lmu.ifi.dbs.elki.data.RealVector; import de.lmu.ifi.dbs.elki.data.cluster.Cluster; import de.lmu.ifi.dbs.elki.data.model.Model; import de.lmu.ifi.dbs.elki.database.Database; import de.lmu.ifi.dbs.elki.distance.DoubleDistance; import de.lmu.ifi.dbs.elki.distance.distancefunction.DistanceFunction; import de.lmu.ifi.dbs.elki.math.Histogram; import de.lmu.ifi.dbs.elki.result.CollectionResult; import de.lmu.ifi.dbs.elki.utilities.Description; import de.lmu.ifi.dbs.elki.utilities.Progress; import de.lmu.ifi.dbs.elki.utilities.QueryResult; import de.lmu.ifi.dbs.elki.utilities.pairs.SimplePair; /** * Evaluate a distance function with respect to kNN queries. For each point, the * neighbors are sorted by distance, then the ROC AUC is computed. A score of 1 * means that the distance function provides a perfect ordering of relevant * neighbors first, then irrelevant neighbors. A value of 0.5 can be obtained by * random sorting. A value of 0 means the distance function is inverted, i.e. a * similarity. * * TODO: Make number of bins configurable * * TODO: Add sampling * * @author Erich Schubert */ public class RankingQualityHistogram<V extends RealVector<V, ?>> extends DistanceBasedAlgorithm<V, DoubleDistance, CollectionResult<DoubleVector>> { private CollectionResult<DoubleVector> result; /** * Empty constructor. Nothing to do. */ public RankingQualityHistogram() { super(); } /** * Run the algorithm. 
*/ @Override protected CollectionResult<DoubleVector> runInTime(Database<V> database) throws IllegalStateException { DistanceFunction<V, DoubleDistance> distFunc = getDistanceFunction(); distFunc.setDatabase(database, isVerbose(), isTime()); // local copy, not entirely necessary. I just like control, guaranteed // sequences // and stable+efficient array index -> id lookups. ArrayList<Integer> ids = new ArrayList<Integer>(database.getIDs()); int size = ids.size(); if(isVerbose()) { verbose("Preprocessing clusters..."); } // Cluster by labels ByLabelClustering<V> split = new ByLabelClustering<V>(); Set<Cluster<Model>> splitted = split.run(database).getAllClusters(); Histogram<Double> hist = Histogram.DoubleHistogram(100, 0.0, 1.0); if(isVerbose()) { verbose("Processing points..."); } Progress rocloop = new Progress("ROC computation loop ...", size); int rocproc = 0; // sort neighbors for(Cluster<?> clus : splitted) { for(Integer i1 : clus.getIDs()) { List<QueryResult<DoubleDistance>> knn = database.kNNQueryForID(i1, size, distFunc); double result = computeROCAUC(size, clus, knn); hist.put(result, hist.get(result) + 1./size); if(isVerbose()) { rocproc++; rocloop.setProcessed(rocproc); progress(rocloop); } } } if(isVerbose()) { verbose(""); } // Transform Histogram into a Double Vector array. Collection<DoubleVector> res = new ArrayList<DoubleVector>(size); for (SimplePair<Double, Double> pair : hist) { DoubleVector row = new DoubleVector(new double[] { pair.getFirst(), pair.getSecond() }); res.add(row); } result = new CollectionResult<DoubleVector>(res); return result; } /** * Compute a ROC curves Area-under-curve. 
* * @param size * @param clus * @param nei * @return area under curve */ private double computeROCAUC(int size, Cluster<?> clus, List<QueryResult<DoubleDistance>> nei) { int postot = clus.size(); int negtot = size - postot; int poscur = 0; int negcur = 0; double lastpos = 0.0; double lastfalse = 0.0; double result = 0.0; Collection<Integer> ids = clus.getIDs(); for(QueryResult<DoubleDistance> p : nei) { if(ids.contains(p.getID())) { poscur += 1; } else { negcur += 1; } double posrate = ((double) poscur) / postot; double negrate = ((double) negcur) / negtot; result += (negrate - lastfalse) * lastpos; lastfalse = negrate; lastpos = posrate; } return result; } /** * Describe the algorithm and it's use. */ public Description getDescription() { return new Description("EvaluateRankingQuality", "EvaluateRankingQuality", "Evaluates the effectiveness of a distance function via the obtained rankings.", ""); } /** * Return a result object */ public CollectionResult<DoubleVector> getResult() { return result; } }
Handling of equidistant neighbors in ROC curve.
src/de/lmu/ifi/dbs/elki/algorithm/statistics/RankingQualityHistogram.java
Handling of equidistant neighbors in ROC curve.
Java
apache-2.0
c789946628953b09af46d6ead0b534cf40762790
0
nus-ncl/service-web,nus-ncl/service-web,nus-ncl/service-web,nus-ncl/service-web
package sg.ncl; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.apache.tomcat.util.codec.binary.Base64; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.*; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.*; import org.springframework.web.client.ResourceAccessException; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import org.springframework.web.servlet.support.RequestContextUtils; import sg.ncl.domain.*; import sg.ncl.exceptions.*; import sg.ncl.testbed_interface.*; import sg.ncl.testbed_interface.Image; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.validation.Valid; import javax.validation.constraints.NotNull; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.*; import java.util.List; import java.util.regex.Pattern; import static java.time.temporal.TemporalAdjusters.firstDayOfMonth; import static java.time.temporal.TemporalAdjusters.lastDayOfMonth; import static sg.ncl.domain.ExceptionState.*; /** * * Spring Controller * 
Direct the views to appropriate locations and invoke the respective REST API * * @author Cassie, Desmond, Te Ye, Vu */ @Controller @Slf4j public class MainController { public static final String CONTENT_DISPOSITION = "Content-Disposition"; public static final String APPLICATION_FORCE_DOWNLOAD = "application/force-download"; private static final String SESSION_LOGGED_IN_USER_ID = "loggedInUserId"; private TeamManager teamManager = TeamManager.getInstance(); // private UserManager userManager = UserManager.getInstance(); // private ExperimentManager experimentManager = ExperimentManager.getInstance(); // private DomainManager domainManager = DomainManager.getInstance(); // private DatasetManager datasetManager = DatasetManager.getInstance(); // private NodeManager nodeManager = NodeManager.getInstance(); private static final String CONTACT_EMAIL = "[email protected]"; private static final String UNKNOWN = "?"; private static final String MESSAGE = "message"; private static final String MESSAGE_SUCCESS = "messageSuccess"; private static final String EXPERIMENT_MESSAGE = "exp_message"; private static final String ERROR_PREFIX = "Error: "; // error messages private static final String ERROR_CONNECTING_TO_SERVICE_TELEMETRY = "Error connecting to service-telemetry: {}"; private static final String ERR_SERVER_OVERLOAD = "There is a problem with your request. Please contact " + CONTACT_EMAIL; private static final String CONNECTION_ERROR = "Connection Error"; private final String permissionDeniedMessage = "Permission denied. 
If the error persists, please contact " + CONTACT_EMAIL; private static final String ERR_START_DATE_AFTER_END_DATE = "End date must be after start date"; // for user dashboard hashmap key values private static final String USER_DASHBOARD_APPROVED_TEAMS = "numberOfApprovedTeam"; private static final String USER_DASHBOARD_RUNNING_EXPERIMENTS = "numberOfRunningExperiments"; private static final String USER_DASHBOARD_FREE_NODES = "freeNodes"; private static final String USER_DASHBOARD_TOTAL_NODES = "totalNodes"; private static final String USER_DASHBOARD_GLOBAL_IMAGES = "globalImagesMap"; private static final String USER_DASHBOARD_LOGGED_IN_USERS_COUNT = "loggedInUsersCount"; private static final String USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT = "runningExperimentsCount"; private static final String DETER_UID = "deterUid"; private static final Pattern VALID_EMAIL_ADDRESS_REGEX = Pattern.compile("(?:(?:\\r\\n)?[ \\t])*(?:(?:(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*\\<(?:(?:\\r\\n)?[ \\t])*(?:@(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ 
\\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*(?:,@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*)*:(?:(?:\\r\\n)?[ \\t])*)?(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*\\>(?:(?:\\r\\n)?[ \\t])*)|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*:(?:(?:\\r\\n)?[ \\t])*(?:(?:(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ 
\\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*\\<(?:(?:\\r\\n)?[ \\t])*(?:@(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*(?:,@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*)*:(?:(?:\\r\\n)?[ \\t])*)?(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ 
\\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*\\>(?:(?:\\r\\n)?[ \\t])*)(?:,\\s*(?:(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*\\<(?:(?:\\r\\n)?[ \\t])*(?:@(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*(?:,@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ 
\\t])*))*)*:(?:(?:\\r\\n)?[ \\t])*)?(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*\\>(?:(?:\\r\\n)?[ \\t])*))*)?;\\s*)"); private static final String FORGET_PSWD_PAGE = "password_reset_email"; private static final String FORGET_PSWD_NEW_PSWD_PAGE = "password_reset_new_password"; private static final String NO_PERMISSION_PAGE = "nopermission"; private static final String EXPERIMENTS = "experiments"; private static final String APPLICATION_DATE = "applicationDate"; private static final String TEAM_NAME = "teamName"; private static final String TEAM_ID = "teamId"; private static final String NODE_ID = "nodeId"; private static final String PERMISSION_DENIED = "Permission denied"; private static final String TEAM_NOT_FOUND = "Team not found"; private static final String NOT_FOUND = " not found."; private static final String EDIT_BUDGET = "editBudget"; private static final String ORIGINAL_BUDGET = "originalBudget"; private static final String REDIRECT_TEAM_PROFILE_TEAM_ID = "redirect:/team_profile/{teamId}"; private static final String REDIRECT_TEAM_PROFILE = "redirect:/team_profile/"; private static final String REDIRECT_INDEX_PAGE = "redirect:/"; private static final String REDIRECT_ENERGY_USAGE = "redirect:/energy_usage"; // remove members from team profile; to display the 
list of experiments created by user private static final String REMOVE_MEMBER_UID = "removeMemberUid"; private static final String REMOVE_MEMBER_NAME = "removeMemberName"; private static final String MEMBER_TYPE = "memberType"; // admin update data resource to track what fields have been updated private static final String ORIGINAL_DATARESOURCE = "original_dataresource"; private static final String NOT_APPLICABLE = "N.A."; @Autowired protected RestTemplate restTemplate; @Inject protected ObjectMapper objectMapper; @Inject protected ConnectionProperties properties; @Inject protected WebProperties webProperties; @Inject protected AccountingProperties accountingProperties; @Inject protected HttpSession httpScopedSession; @RequestMapping("/") public String index() { return "index"; } @RequestMapping("/overview") public String overview() { return "overview"; } @RequestMapping("/community") public String community() { return "community"; } @RequestMapping("/about") public String about() { return "about"; } @RequestMapping("/event") public String event() { return "event"; } @RequestMapping("/plan") public String plan() { return "plan"; } @RequestMapping("/career") public String career() { return "career"; } @RequestMapping("/pricing") public String pricing() { return "pricing"; } @RequestMapping("/resources") public String resources() { return "resources"; } @RequestMapping("/research") public String research() { return "research"; } @RequestMapping("/calendar") public String calendar() { return "calendar"; } @RequestMapping("/tutorials/createaccount") public String createAccount() { return "createaccount"; } @RequestMapping("/tutorials/createexperiment") public String createExperimentTutorial() { return "createexperiment"; } @RequestMapping("/tutorials/loadimage") public String loadimage() { return "loadimage"; } @RequestMapping("/tutorials/saveimage") public String saveimage() { return "saveimage"; } @RequestMapping("/tutorials/applyteam") public String applyteam() { 
return "applyteam"; } @RequestMapping("/tutorials/jointeam") public String jointeam() { return "jointeam"; } @RequestMapping("/tutorials/usenode") public String usenode() { return "usenode"; } @RequestMapping("/tutorials/usessh") public String usessh() { return "usessh"; } @RequestMapping("/tutorials/usescp") public String usescp() { return "usescp"; } @RequestMapping("/tutorials/usegui") public String usegui() { return "usegui"; } @RequestMapping("/tutorials/manageresource") public String manageresource() { return "manageresource"; } @RequestMapping("/tutorials/testbedinfo") public String testbedinfo() { return "testbedinfo"; } @RequestMapping("/tutorials/createcustom") public String createcustom() { return "createcustom"; } @RequestMapping("/error_openstack") public String error_openstack() { return "error_openstack"; } @RequestMapping("/accessexperiment") public String accessexperiment() { return "accessexperiment"; } @RequestMapping("/resource2") public String resource2() { return "resource2"; } @RequestMapping("/tutorials") public String tutorials() { return "tutorials"; } @RequestMapping("/maintainance") public String maintainance() { return "maintainance"; } @RequestMapping("/testbedInformation") public String testbedInformation(Model model) throws IOException { model.addAttribute(USER_DASHBOARD_GLOBAL_IMAGES, getGlobalImages()); return "testbed_information"; } // get all the nodes' status // there are three types of status // "free" : node is free // "in_use" : node is in use // "reload" : node is in process of freeing or unknown status // "reserved" : node is pre-reserved for a project @RequestMapping("/testbedNodesStatus") public String testbedNodesStatus(Model model) throws IOException { // get number of active users and running experiments Map<String, String> testbedStatsMap = getTestbedStats(); testbedStatsMap.put(USER_DASHBOARD_FREE_NODES, "0"); testbedStatsMap.put(USER_DASHBOARD_TOTAL_NODES, "0"); Map<String, List<Map<String, String>>> nodesStatus = 
getNodesStatus(); Map<String, Map<String, Long>> nodesStatusCount = new HashMap<>(); // loop through each of the machine type // tabulate the different nodes type // count the number of different nodes status, e.g. SYSTEMX = { FREE = 10, IN_USE = 11, ... } nodesStatus.entrySet().forEach(machineTypeListEntry -> { Map<String, Long> nodesCountMap = new HashMap<>(); long free = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "free".equalsIgnoreCase(stringStringMap.get("status"))).count(); long inUse = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "in_use".equalsIgnoreCase(stringStringMap.get("status"))).count(); long reserved = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "reserved".equalsIgnoreCase(stringStringMap.get("status"))).count(); long reload = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "reload".equalsIgnoreCase(stringStringMap.get("status"))).count(); long total = free + inUse + reserved + reload; long currentTotal = Long.parseLong(testbedStatsMap.get(USER_DASHBOARD_TOTAL_NODES)) + total; long currentFree = Long.parseLong(testbedStatsMap.get(USER_DASHBOARD_FREE_NODES)) + free; nodesCountMap.put(NodeType.FREE.name(), free); nodesCountMap.put(NodeType.IN_USE.name(), inUse); nodesCountMap.put(NodeType.RESERVED.name(), reserved); nodesCountMap.put(NodeType.RELOADING.name(), reload); nodesStatusCount.put(machineTypeListEntry.getKey(), nodesCountMap); testbedStatsMap.put(USER_DASHBOARD_FREE_NODES, Long.toString(currentFree)); testbedStatsMap.put(USER_DASHBOARD_TOTAL_NODES, Long.toString(currentTotal)); }); model.addAttribute("nodesStatus", nodesStatus); model.addAttribute("nodesStatusCount", nodesStatusCount); model.addAttribute(USER_DASHBOARD_LOGGED_IN_USERS_COUNT, testbedStatsMap.get(USER_DASHBOARD_LOGGED_IN_USERS_COUNT)); model.addAttribute(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT, testbedStatsMap.get(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT)); 
model.addAttribute(USER_DASHBOARD_FREE_NODES, testbedStatsMap.get(USER_DASHBOARD_FREE_NODES)); model.addAttribute(USER_DASHBOARD_TOTAL_NODES, testbedStatsMap.get(USER_DASHBOARD_TOTAL_NODES)); return "testbed_nodes_status"; } @RequestMapping(value = "/orderform/download", method = RequestMethod.GET) public void OrderForm_v1Download(HttpServletResponse response) throws OrderFormDownloadException, IOException { InputStream stream = null; response.setContentType(MediaType.APPLICATION_PDF_VALUE); try { stream = getClass().getClassLoader().getResourceAsStream("downloads/order_form.pdf"); response.setContentType(APPLICATION_FORCE_DOWNLOAD); response.setHeader(CONTENT_DISPOSITION, "attachment; filename=order_form.pdf"); IOUtils.copy(stream, response.getOutputStream()); response.flushBuffer(); } catch (IOException ex) { log.info("Error for download orderform."); throw new OrderFormDownloadException("Error for download orderform."); } finally { if (stream != null) { stream.close(); } } } @RequestMapping("/contactus") public String contactus() { return "contactus"; } @RequestMapping("/notfound") public String redirectNotFound(HttpSession session) { if (session.getAttribute("id") != null && !session.getAttribute("id").toString().isEmpty()) { // user is already logged on and has encountered an error // redirect to dashboard return "redirect:/dashboard"; } else { // user have not logged on before // redirect to home page return REDIRECT_INDEX_PAGE; } } @RequestMapping(value = "/login", method = RequestMethod.GET) public String login(Model model) { model.addAttribute("loginForm", new LoginForm()); return "login"; } @RequestMapping(value = "/emailVerification", params = {"id", "email", "key"}) public String verifyEmail( @NotNull @RequestParam("id") final String id, @NotNull @RequestParam("email") final String emailBase64, @NotNull @RequestParam("key") final String key ) throws UnsupportedEncodingException { HttpHeaders headers = new HttpHeaders(); 
headers.setContentType(MediaType.APPLICATION_JSON); ObjectNode keyObject = objectMapper.createObjectNode(); keyObject.put("key", key); HttpEntity<String> request = new HttpEntity<>(keyObject.toString(), headers); restTemplate.setErrorHandler(new MyResponseErrorHandler()); final String link = properties.getSioRegUrl() + "/users/" + id + "/emails/" + emailBase64; log.info("Activation link: {}, verification key {}", link, key); ResponseEntity response = restTemplate.exchange(link, HttpMethod.PUT, request, String.class); if (RestUtil.isError(response.getStatusCode())) { log.error("Activation of user {} failed.", id); return "email_validation_failed"; } else { log.info("Activation of user {} completed.", id); return "email_validation_ok"; } } @RequestMapping(value = "/login", method = RequestMethod.POST) public String loginSubmit( @Valid @ModelAttribute("loginForm") LoginForm loginForm, BindingResult bindingResult, Model model, HttpSession session, final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException { if (bindingResult.hasErrors()) { loginForm.setErrorMsg("Login failed: Invalid email/password."); return "login"; } String inputEmail = loginForm.getLoginEmail(); String inputPwd = loginForm.getLoginPassword(); if (inputEmail.trim().isEmpty() || inputPwd.trim().isEmpty()) { loginForm.setErrorMsg("Email or Password cannot be empty!"); return "login"; } String plainCreds = inputEmail + ":" + inputPwd; byte[] plainCredsBytes = plainCreds.getBytes(); byte[] base64CredsBytes = Base64.encodeBase64(plainCredsBytes); String base64Creds = new String(base64CredsBytes); ResponseEntity response; HttpHeaders headers = new HttpHeaders(); headers.set("Authorization", "Basic " + base64Creds); HttpEntity<String> request = new HttpEntity<>(headers); restTemplate.setErrorHandler(new MyResponseErrorHandler()); try { response = restTemplate.exchange(properties.getSioAuthUrl(), HttpMethod.POST, request, String.class); } catch (RestClientException e) { log.warn("Error 
connecting to sio authentication service: {}", e); loginForm.setErrorMsg(ERR_SERVER_OVERLOAD); return "login"; } String jwtTokenString = response.getBody().toString(); log.info("token string {}", jwtTokenString); if (jwtTokenString == null || jwtTokenString.isEmpty()) { log.warn("login failed for {}: unknown response code", loginForm.getLoginEmail()); loginForm.setErrorMsg("Login failed: Invalid email/password."); return "login"; } if (RestUtil.isError(response.getStatusCode())) { try { MyErrorResource error = objectMapper.readValue(jwtTokenString, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); if (exceptionState == ExceptionState.CREDENTIALS_NOT_FOUND_EXCEPTION) { log.warn("login failed for {}: credentials not found", loginForm.getLoginEmail()); loginForm.setErrorMsg("Login failed: Account does not exist. Please register."); return "login"; } log.warn("login failed for {}: {}", loginForm.getLoginEmail(), error.getError()); loginForm.setErrorMsg("Login failed: Invalid email/password."); return "login"; } catch (IOException ioe) { log.warn("IOException {}", ioe); throw new WebServiceRuntimeException(ioe.getMessage()); } } JSONObject tokenObject = new JSONObject(jwtTokenString); String token = tokenObject.getString("token"); String id = tokenObject.getString("id"); String role = ""; if (tokenObject.getJSONArray("roles") != null) { role = tokenObject.getJSONArray("roles").get(0).toString(); } if (token.trim().isEmpty() || id.trim().isEmpty() || role.trim().isEmpty()) { log.warn("login failed for {}: empty id {} or token {} or role {}", loginForm.getLoginEmail(), id, token, role); loginForm.setErrorMsg("Login failed: Invalid email/password."); return "login"; } // now check user status to decide what to show to the user User2 user = invokeAndExtractUserInfo(id); try { String userStatus = user.getStatus(); boolean emailVerified = user.getEmailVerified(); if (UserStatus.FROZEN.toString().equals(userStatus)) { 
log.warn("User {} login failed: account has been frozen", id); loginForm.setErrorMsg("Login Failed: Account Frozen. Please contact " + CONTACT_EMAIL); return "login"; } else if (!emailVerified || (UserStatus.CREATED.toString()).equals(userStatus)) { redirectAttributes.addAttribute("statuschecklist", userStatus); log.info("User {} not validated, redirected to email verification page", id); return "redirect:/email_checklist"; } else if ((UserStatus.PENDING.toString()).equals(userStatus)) { redirectAttributes.addAttribute("statuschecklist", userStatus); log.info("User {} not approved, redirected to application pending page", id); return "redirect:/email_checklist"; } else if ((UserStatus.APPROVED.toString()).equals(userStatus)) { // set session variables setSessionVariables(session, loginForm.getLoginEmail(), id, user.getFirstName(), role, token); log.info("login success for {}, id: {}", loginForm.getLoginEmail(), id); return "redirect:/dashboard"; } else { log.warn("login failed for user {}: account is rejected or closed", id); loginForm.setErrorMsg("Login Failed: Account Rejected/Closed."); return "login"; } } catch (Exception e) { log.warn("Error parsing json object for user: {}", e.getMessage()); loginForm.setErrorMsg(ERR_SERVER_OVERLOAD); return "login"; } } // triggered when user clicks "Forget Password?" 
@RequestMapping("/password_reset_email") public String passwordResetEmail(Model model) { model.addAttribute("passwordResetRequestForm", new PasswordResetRequestForm()); return FORGET_PSWD_PAGE; } // triggered when user clicks "Send Reset Email" button @PostMapping("/password_reset_request") public String sendPasswordResetRequest( @ModelAttribute("passwordResetRequestForm") PasswordResetRequestForm passwordResetRequestForm ) throws WebServiceRuntimeException { String email = passwordResetRequestForm.getEmail(); if (!VALID_EMAIL_ADDRESS_REGEX.matcher(email).matches()) { passwordResetRequestForm.setErrMsg("Please provide a valid email address"); return FORGET_PSWD_PAGE; } JSONObject obj = new JSONObject(); obj.put("username", email); log.info("Connecting to sio for password reset email: {}", email); HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); HttpEntity<String> request = new HttpEntity<>(obj.toString(), headers); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = null; try { response = restTemplate.exchange(properties.getPasswordResetRequestURI(), HttpMethod.POST, request, String.class); } catch (RestClientException e) { log.warn("Cannot connect to sio for password reset email: {}", e); passwordResetRequestForm.setErrMsg("Cannot connect. Server may be down!"); return FORGET_PSWD_PAGE; } if (RestUtil.isError(response.getStatusCode())) { log.warn("Server responded error for password reset email: {}", response.getStatusCode()); passwordResetRequestForm.setErrMsg("Email not registered. 
Please use a different email address."); return FORGET_PSWD_PAGE; } log.info("Password reset email sent for {}", email); return "password_reset_email_sent"; } // triggered when user clicks password reset link in the email @RequestMapping(path = "/passwordReset", params = {"key"}) public String passwordResetNewPassword(@NotNull @RequestParam("key") final String key, Model model) { PasswordResetForm form = new PasswordResetForm(); form.setKey(key); model.addAttribute("passwordResetForm", form); // redirect to the page for user to enter new password return FORGET_PSWD_NEW_PSWD_PAGE; } // actual call to sio to reset password @RequestMapping(path = "/password_reset") public String resetPassword(@ModelAttribute("passwordResetForm") PasswordResetForm passwordResetForm) throws IOException { if (!passwordResetForm.isPasswordOk()) { return FORGET_PSWD_NEW_PSWD_PAGE; } JSONObject obj = new JSONObject(); obj.put("key", passwordResetForm.getKey()); obj.put("new", passwordResetForm.getPassword1()); log.info("Connecting to sio for password reset, key = {}", passwordResetForm.getKey()); HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); HttpEntity<String> request = new HttpEntity<>(obj.toString(), headers); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = null; try { response = restTemplate.exchange(properties.getPasswordResetURI(), HttpMethod.PUT, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio for password reset! {}", e); passwordResetForm.setErrMsg("Cannot connect to server! Please try again later."); return FORGET_PSWD_NEW_PSWD_PAGE; } if (RestUtil.isError(response.getStatusCode())) { EnumMap<ExceptionState, String> exceptionMessageMap = new EnumMap<>(ExceptionState.class); exceptionMessageMap.put(PASSWORD_RESET_REQUEST_TIMEOUT_EXCEPTION, "Password reset request timed out. 
Please request a new reset email."); exceptionMessageMap.put(PASSWORD_RESET_REQUEST_NOT_FOUND_EXCEPTION, "Invalid password reset request. Please request a new reset email."); exceptionMessageMap.put(ADAPTER_CONNECTION_EXCEPTION, "Server-side error. Please contact " + CONTACT_EMAIL); MyErrorResource error = objectMapper.readValue(response.getBody().toString(), MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); final String errMsg = exceptionMessageMap.get(exceptionState) == null ? ERR_SERVER_OVERLOAD : exceptionMessageMap.get(exceptionState); passwordResetForm.setErrMsg(errMsg); log.warn("Server responded error for password reset: {}", exceptionState.toString()); return FORGET_PSWD_NEW_PSWD_PAGE; } log.info("Password was reset, key = {}", passwordResetForm.getKey()); return "password_reset_success"; } @RequestMapping("/dashboard") public String dashboard(Model model, HttpSession session) throws WebServiceRuntimeException { HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getDeterUid(session.getAttribute(webProperties.getSessionUserId()).toString()), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { log.error("No user exists : {}", session.getAttribute(webProperties.getSessionUserId())); MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); model.addAttribute(DETER_UID, CONNECTION_ERROR); } else { log.info("Show the deter user id: {}", responseBody); model.addAttribute(DETER_UID, responseBody); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } // retrieve user dashboard stats Map<String, Integer> userDashboardMap = getUserDashboardStats(session.getAttribute(webProperties.getSessionUserId()).toString()); 
List<TeamUsageInfo> usageInfoList = getTeamsUsageStatisticsForUser(session.getAttribute(webProperties.getSessionUserId()).toString()); model.addAttribute("userDashboardMap", userDashboardMap); model.addAttribute("usageInfoList", usageInfoList); return "dashboard"; } @RequestMapping(value = "/logout", method = RequestMethod.GET) public String logout(HttpSession session) { removeSessionVariables(session); return REDIRECT_INDEX_PAGE; } //--------------------------Sign Up Page-------------------------- @RequestMapping(value = "/signup2", method = RequestMethod.GET) public String signup2(Model model, HttpServletRequest request) { Map<String, ?> inputFlashMap = RequestContextUtils.getInputFlashMap(request); if (inputFlashMap != null) { log.debug((String) inputFlashMap.get(MESSAGE)); model.addAttribute("signUpMergedForm", (SignUpMergedForm) inputFlashMap.get("signUpMergedForm")); } else { log.debug("InputFlashMap is null"); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); } return "signup2"; } @RequestMapping(value = "/signup2", method = RequestMethod.POST) public String validateDetails( @Valid @ModelAttribute("signUpMergedForm") SignUpMergedForm signUpMergedForm, BindingResult bindingResult, final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException { if (bindingResult.hasErrors() || signUpMergedForm.getIsValid() == false) { log.warn("Register form has errors {}", signUpMergedForm.toString()); return "signup2"; } if (!signUpMergedForm.getHasAcceptTeamOwnerPolicy()) { signUpMergedForm.setErrorTeamOwnerPolicy("Please accept the team owner policy"); log.warn("Policy not accepted"); return "signup2"; } // get form fields // craft the registration json JSONObject mainObject = new JSONObject(); JSONObject credentialsFields = new JSONObject(); credentialsFields.put("username", signUpMergedForm.getEmail().trim()); credentialsFields.put("password", signUpMergedForm.getPassword()); // create the user JSON JSONObject userFields = new JSONObject(); 
JSONObject userDetails = new JSONObject(); JSONObject addressDetails = new JSONObject(); userDetails.put("firstName", signUpMergedForm.getFirstName().trim()); userDetails.put("lastName", signUpMergedForm.getLastName().trim()); userDetails.put("jobTitle", signUpMergedForm.getJobTitle().trim()); userDetails.put("email", signUpMergedForm.getEmail().trim()); userDetails.put("phone", signUpMergedForm.getPhone().trim()); userDetails.put("institution", signUpMergedForm.getInstitution().trim()); userDetails.put("institutionAbbreviation", signUpMergedForm.getInstitutionAbbreviation().trim()); userDetails.put("institutionWeb", signUpMergedForm.getWebsite().trim()); userDetails.put("address", addressDetails); addressDetails.put("address1", signUpMergedForm.getAddress1().trim()); addressDetails.put("address2", signUpMergedForm.getAddress2().trim()); addressDetails.put("country", signUpMergedForm.getCountry().trim()); addressDetails.put("region", signUpMergedForm.getProvince().trim()); addressDetails.put("city", signUpMergedForm.getCity().trim()); addressDetails.put("zipCode", signUpMergedForm.getPostalCode().trim()); userFields.put("userDetails", userDetails); userFields.put(APPLICATION_DATE, ZonedDateTime.now()); JSONObject teamFields = new JSONObject(); // add all to main json mainObject.put("credentials", credentialsFields); mainObject.put("user", userFields); mainObject.put("team", teamFields); // check if user chose create new team or join existing team by checking team name String createNewTeamName = signUpMergedForm.getTeamName().trim(); String joinNewTeamName = signUpMergedForm.getJoinTeamName().trim(); if (createNewTeamName != null && !createNewTeamName.isEmpty()) { log.info("Signup new team name {}", createNewTeamName); boolean errorsFound = false; if (createNewTeamName.length() < 2 || createNewTeamName.length() > 12) { errorsFound = true; signUpMergedForm.setErrorTeamName("Team name must be 2 to 12 alphabetic/numeric characters"); } if 
(signUpMergedForm.getTeamDescription() == null || signUpMergedForm.getTeamDescription().isEmpty()) { errorsFound = true; signUpMergedForm.setErrorTeamDescription("Team description cannot be empty"); } if (signUpMergedForm.getTeamWebsite() == null || signUpMergedForm.getTeamWebsite().isEmpty()) { errorsFound = true; signUpMergedForm.setErrorTeamWebsite("Team website cannot be empty"); } if (errorsFound) { log.warn("Signup new team error {}", signUpMergedForm.toString()); // clear join team name first before submitting the form signUpMergedForm.setJoinTeamName(null); return "signup2"; } else { teamFields.put("name", signUpMergedForm.getTeamName().trim()); teamFields.put("description", signUpMergedForm.getTeamDescription().trim()); teamFields.put("website", signUpMergedForm.getTeamWebsite().trim()); teamFields.put("organisationType", signUpMergedForm.getTeamOrganizationType()); teamFields.put("visibility", signUpMergedForm.getIsPublic()); mainObject.put("isJoinTeam", false); try { registerUserToDeter(mainObject); } catch ( TeamNotFoundException | TeamNameAlreadyExistsException | UsernameAlreadyExistsException | EmailAlreadyExistsException | InvalidTeamNameException | InvalidPasswordException | DeterLabOperationFailedException e) { redirectAttributes.addFlashAttribute(MESSAGE, e.getMessage()); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } catch (Exception e) { redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } log.info("Signup new team success"); return "redirect:/team_application_submitted"; } } else if (joinNewTeamName != null && !joinNewTeamName.isEmpty()) { log.info("Signup join team name {}", joinNewTeamName); // get the team JSON from team name Team2 joinTeamInfo; try { joinTeamInfo = getTeamIdByName(signUpMergedForm.getJoinTeamName().trim()); } catch (TeamNotFoundException | 
AdapterConnectionException e) { redirectAttributes.addFlashAttribute(MESSAGE, e.getMessage()); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } teamFields.put("id", joinTeamInfo.getId()); // set the flag to indicate to controller that it is joining an existing team mainObject.put("isJoinTeam", true); try { registerUserToDeter(mainObject); } catch ( TeamNotFoundException | AdapterConnectionException | TeamNameAlreadyExistsException | UsernameAlreadyExistsException | EmailAlreadyExistsException | InvalidTeamNameException | InvalidPasswordException | DeterLabOperationFailedException e) { redirectAttributes.addFlashAttribute(MESSAGE, e.getMessage()); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } catch (Exception e) { redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } log.info("Signup join team success"); log.info("jointeam info: {}", joinTeamInfo); redirectAttributes.addFlashAttribute("team", joinTeamInfo); return "redirect:/join_application_submitted"; } else { log.warn("Signup unreachable statement"); // logic error not suppose to reach here // possible if user fill up create new team but without the team name redirectAttributes.addFlashAttribute("signupError", "There is a problem when submitting your form. 
Please re-enter and submit the details again."); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } } /** * Use when registering new accounts * * @param mainObject A JSONObject that contains user's credentials, personal details and team application details */ private void registerUserToDeter(JSONObject mainObject) throws WebServiceRuntimeException, TeamNotFoundException, AdapterConnectionException, TeamNameAlreadyExistsException, UsernameAlreadyExistsException, EmailAlreadyExistsException, InvalidTeamNameException, InvalidPasswordException, DeterLabOperationFailedException { HttpEntity<String> request = createHttpEntityWithBodyNoAuthHeader(mainObject.toString()); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getSioRegUrl(), HttpMethod.POST, request, String.class); String responseBody = response.getBody().toString(); log.info("Register user to deter response: {}", responseBody); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); log.warn("Register user exception error: {}", error.getError()); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Register new user failed on DeterLab: {}", error.getMessage()); throw new DeterLabOperationFailedException(ERROR_PREFIX + (error.getMessage().contains("unknown error") ? 
ERR_SERVER_OVERLOAD : error.getMessage())); case TEAM_NAME_ALREADY_EXISTS_EXCEPTION: log.warn("Register new users new team request : team name already exists"); throw new TeamNameAlreadyExistsException("Team name already exists"); case INVALID_TEAM_NAME_EXCEPTION: log.warn("Register new users new team request : team name invalid"); throw new InvalidTeamNameException("Invalid team name: must be 6-12 alphanumeric characters only"); case INVALID_PASSWORD_EXCEPTION: log.warn("Register new users new team request : invalid password"); throw new InvalidPasswordException("Password is too simple"); case USERNAME_ALREADY_EXISTS_EXCEPTION: // throw from user service { String email = mainObject.getJSONObject("user").getJSONObject("userDetails").getString("email"); log.warn("Register new users : email already exists: {}", email); throw new UsernameAlreadyExistsException(ERROR_PREFIX + email + " already in use."); } case EMAIL_ALREADY_EXISTS_EXCEPTION: // throw from adapter deterlab { String email = mainObject.getJSONObject("user").getJSONObject("userDetails").getString("email"); log.warn("Register new users : email already exists: {}", email); throw new EmailAlreadyExistsException(ERROR_PREFIX + email + " already in use."); } default: log.warn("Registration or adapter connection fail"); // possible sio or adapter connection fail throw new AdapterConnectionException(ERR_SERVER_OVERLOAD); } } else { // do nothing log.info("Not an error for status code: {}", response.getStatusCode()); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } /** * Use when users register a new account for joining existing team * * @param teamName The team name to join * @return the team id from sio */ private Team2 getTeamIdByName(String teamName) throws WebServiceRuntimeException, TeamNotFoundException, AdapterConnectionException { // FIXME check if team name exists // FIXME check for general exception? 
HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getTeamByName(teamName), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); if (exceptionState == ExceptionState.TEAM_NOT_FOUND_EXCEPTION) { log.warn("Get team by name : team name error"); throw new TeamNotFoundException("Team name " + teamName + " does not exists"); } else { log.warn("Team service or adapter connection fail"); // possible sio or adapter connection fail throw new AdapterConnectionException(ERR_SERVER_OVERLOAD); } } else { return extractTeamInfo(responseBody); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } //--------------------------Account Settings Page-------------------------- @RequestMapping(value = "/account_settings", method = RequestMethod.GET) public String accountDetails(Model model, HttpSession session) throws WebServiceRuntimeException { String userId_uri = properties.getSioUsersUrl() + session.getAttribute("id"); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(userId_uri, HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { log.error("No user to edit : {}", session.getAttribute("id")); MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); throw new RestClientException("[" + error.getError() + "] "); } else { User2 user2 = extractUserInfo(responseBody); // need to do this so that we can compare after 
submitting the form session.setAttribute(webProperties.getSessionUserAccount(), user2); model.addAttribute("editUser", user2); return "account_settings"; } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } @RequestMapping(value = "/account_settings", method = RequestMethod.POST) public String editAccountDetails( @ModelAttribute("editUser") User2 editUser, final RedirectAttributes redirectAttributes, HttpSession session) throws WebServiceRuntimeException { boolean errorsFound = false; String editPhrase = "editPhrase"; // check fields first if (errorsFound == false && editUser.getFirstName().isEmpty()) { redirectAttributes.addFlashAttribute("editFirstName", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getLastName().isEmpty()) { redirectAttributes.addFlashAttribute("editLastName", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getPhone().isEmpty()) { redirectAttributes.addFlashAttribute("editPhone", "fail"); errorsFound = true; } if (errorsFound == false && (editUser.getPhone().matches("(.*)[a-zA-Z](.*)") || editUser.getPhone().length() < 6)) { // previously already check if phone is empty // now check phone must contain only digits redirectAttributes.addFlashAttribute("editPhone", "fail"); errorsFound = true; } if (errorsFound == false && !editUser.getConfirmPassword().isEmpty() && !editUser.isPasswordValid()) { redirectAttributes.addFlashAttribute(editPhrase, "invalid"); errorsFound = true; } if (errorsFound == false && editUser.getJobTitle().isEmpty()) { redirectAttributes.addFlashAttribute("editJobTitle", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getInstitution().isEmpty()) { redirectAttributes.addFlashAttribute("editInstitution", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getCountry().isEmpty()) { redirectAttributes.addFlashAttribute("editCountry", "fail"); errorsFound = true; } if (errorsFound) { 
session.removeAttribute(webProperties.getSessionUserAccount()); return "redirect:/account_settings"; } else { // used to compare original and edited User2 objects User2 originalUser = (User2) session.getAttribute(webProperties.getSessionUserAccount()); JSONObject userObject = new JSONObject(); JSONObject userDetails = new JSONObject(); JSONObject address = new JSONObject(); userDetails.put("firstName", editUser.getFirstName()); userDetails.put("lastName", editUser.getLastName()); userDetails.put("email", editUser.getEmail()); userDetails.put("phone", editUser.getPhone()); userDetails.put("jobTitle", editUser.getJobTitle()); userDetails.put("address", address); userDetails.put("institution", editUser.getInstitution()); userDetails.put("institutionAbbreviation", originalUser.getInstitutionAbbreviation()); userDetails.put("institutionWeb", originalUser.getInstitutionWeb()); address.put("address1", originalUser.getAddress1()); address.put("address2", originalUser.getAddress2()); address.put("country", editUser.getCountry()); address.put("city", originalUser.getCity()); address.put("region", originalUser.getRegion()); address.put("zipCode", originalUser.getPostalCode()); userObject.put("userDetails", userDetails); String userId_uri = properties.getSioUsersUrl() + session.getAttribute(webProperties.getSessionUserId()); HttpEntity<String> request = createHttpEntityWithBody(userObject.toString()); restTemplate.exchange(userId_uri, HttpMethod.PUT, request, String.class); if (!originalUser.getFirstName().equals(editUser.getFirstName())) { redirectAttributes.addFlashAttribute("editFirstName", "success"); } if (!originalUser.getLastName().equals(editUser.getLastName())) { redirectAttributes.addFlashAttribute("editLastName", "success"); } if (!originalUser.getPhone().equals(editUser.getPhone())) { redirectAttributes.addFlashAttribute("editPhone", "success"); } if (!originalUser.getJobTitle().equals(editUser.getJobTitle())) { redirectAttributes.addFlashAttribute("editJobTitle", 
"success"); } if (!originalUser.getInstitution().equals(editUser.getInstitution())) { redirectAttributes.addFlashAttribute("editInstitution", "success"); } if (!originalUser.getCountry().equals(editUser.getCountry())) { redirectAttributes.addFlashAttribute("editCountry", "success"); } // credential service change password if (editUser.isPasswordMatch()) { JSONObject credObject = new JSONObject(); credObject.put("password", editUser.getPassword()); HttpEntity<String> credRequest = createHttpEntityWithBody(credObject.toString()); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getUpdateCredentials(session.getAttribute("id").toString()), HttpMethod.PUT, credRequest, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); redirectAttributes.addFlashAttribute(editPhrase, "fail"); } else { redirectAttributes.addFlashAttribute(editPhrase, "success"); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } finally { session.removeAttribute(webProperties.getSessionUserAccount()); } } } return "redirect:/account_settings"; } //--------------------User Side Approve Members Page------------ @RequestMapping("/approve_new_user") public String approveNewUser(Model model, HttpSession session) throws Exception { // HashMap<Integer, Team> rv = new HashMap<Integer, Team>(); // rv = teamManager.getTeamMapByTeamOwner(getSessionIdOfLoggedInUser(session)); // boolean userHasAnyJoinRequest = hasAnyJoinRequest(rv); // model.addAttribute("teamMapOwnedByUser", rv); // model.addAttribute("userHasAnyJoinRequest", userHasAnyJoinRequest); List<JoinRequestApproval> rv = new ArrayList<>(); List<JoinRequestApproval> temp; // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = 
restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); JSONObject object = new JSONObject(responseBody); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = teamIdsJsonArray.get(i).toString(); HttpEntity<String> teamRequest = createHttpEntityHeaderOnly(); ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class); String teamResponseBody = teamResponse.getBody().toString(); Team2 team2 = new Team2(); JSONObject teamObject = new JSONObject(teamResponseBody); JSONArray membersArray = teamObject.getJSONArray("members"); team2.setId(teamObject.getString("id")); team2.setName(teamObject.getString("name")); boolean isTeamLeader = false; temp = new ArrayList<>(); for (int j = 0; j < membersArray.length(); j++) { JSONObject memberObject = membersArray.getJSONObject(j); String userId = memberObject.getString("userId"); String teamMemberType = memberObject.getString(MEMBER_TYPE); String teamMemberStatus = memberObject.getString("memberStatus"); String teamJoinedDate = formatZonedDateTime(memberObject.get("joinedDate").toString()); JoinRequestApproval joinRequestApproval = new JoinRequestApproval(); if (userId.equals(session.getAttribute("id").toString()) && teamMemberType.equals(MemberType.OWNER.toString())) { isTeamLeader = true; } if (teamMemberStatus.equals(MemberStatus.PENDING.toString()) && teamMemberType.equals(MemberType.MEMBER.toString())) { User2 myUser = invokeAndExtractUserInfo(userId); joinRequestApproval.setUserId(myUser.getId()); joinRequestApproval.setUserEmail(myUser.getEmail()); joinRequestApproval.setUserName(myUser.getFirstName() + " " + myUser.getLastName()); joinRequestApproval.setApplicationDate(teamJoinedDate); joinRequestApproval.setTeamId(team2.getId()); 
joinRequestApproval.setTeamName(team2.getName()); joinRequestApproval.setVerified(myUser.getEmailVerified()); temp.add(joinRequestApproval); log.info("Join request: UserId: {}, UserEmail: {}", myUser.getId(), myUser.getEmail()); } } if (isTeamLeader) { if (!temp.isEmpty()) { rv.addAll(temp); } } } model.addAttribute("joinApprovalList", rv); return "approve_new_user"; } @RequestMapping("/approve_new_user/accept/{teamId}/{userId}") public String userSideAcceptJoinRequest( @PathVariable String teamId, @PathVariable String userId, HttpSession session, RedirectAttributes redirectAttributes) throws WebServiceRuntimeException { log.info("Approve join request: User {}, Team {}, Approver {}", userId, teamId, session.getAttribute("id").toString()); JSONObject mainObject = new JSONObject(); JSONObject userFields = new JSONObject(); userFields.put("id", session.getAttribute("id").toString()); mainObject.put("user", userFields); HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString()); ResponseEntity response; try { response = restTemplate.exchange(properties.getApproveJoinRequest(teamId, userId), HttpMethod.POST, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio team service: {}", e); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/approve_new_user"; } String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { try { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case EMAIL_NOT_VERIFIED_EXCEPTION: log.warn("Approve join request: User {} email not verified", userId); redirectAttributes.addFlashAttribute(MESSAGE, "User email has not been verified"); break; case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Approve join request: User {}, Team {} fail", userId, teamId); 
// --- tail of the approve-join-request handler; the method's opening lines are above this chunk ---
                    redirectAttributes.addFlashAttribute(MESSAGE, "Approve join request fail");
                    break;
                default:
                    log.warn("Server side error: {}", error.getError());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/approve_new_user";
        } catch (IOException ioe) {
            // NOTE(review): passing the exception as the "{}" argument makes SLF4J format it via
            // toString() and lose the stack trace; log.warn("IOException", ioe) would keep it — TODO confirm intent
            log.warn("IOException {}", ioe);
            throw new WebServiceRuntimeException(ioe.getMessage());
        }
    }
    // everything looks OK?
    log.info("Join request has been APPROVED, User {}, Team {}", userId, teamId);
    redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Join request has been APPROVED.");
    return "redirect:/approve_new_user";
}

/**
 * Rejects (deletes) a pending join request of {@code userId} to {@code teamId} on behalf of the
 * approver identified by the "id" attribute in the HTTP session. Delegates the deletion to the
 * SIO team service via HTTP DELETE and flashes a user-facing status message.
 *
 * @param teamId             id of the team the join request targets
 * @param userId             id of the user whose join request is rejected
 * @param session            HTTP session; must hold the approver's "id" attribute
 * @param redirectAttributes flash-message carrier for the redirect target
 * @return redirect to the approve-new-user page in all outcomes
 * @throws WebServiceRuntimeException when the error response body cannot be parsed
 */
@RequestMapping("/approve_new_user/reject/{teamId}/{userId}")
public String userSideRejectJoinRequest(
        @PathVariable String teamId,
        @PathVariable String userId,
        HttpSession session,
        RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    log.info("Reject join request: User {}, Team {}, Approver {}", userId, teamId, session.getAttribute("id").toString());

    // Request body carries only the approver's id; the target user/team come from the URL.
    JSONObject mainObject = new JSONObject();
    JSONObject userFields = new JSONObject();
    userFields.put("id", session.getAttribute("id").toString());
    mainObject.put("user", userFields);

    HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.getRejectJoinRequest(teamId, userId), HttpMethod.DELETE, request, String.class);
    } catch (RestClientException e) {
        // Connectivity failure to the team service — surface a generic overload message.
        log.warn("Error connecting to sio team service: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return "redirect:/approve_new_user";
    }

    String responseBody = response.getBody().toString();
    if (RestUtil.isError(response.getStatusCode())) {
        // Error responses carry a serialized MyErrorResource; map known states to flash messages.
        try {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case DETERLAB_OPERATION_FAILED_EXCEPTION:
                    log.warn("Reject join request: User {}, Team {} fail", userId, teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "Reject join request fail");
                    break;
                default:
                    log.warn("Server side error: {}", error.getError());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/approve_new_user";
        } catch (IOException ioe) {
            log.warn("IOException {}", ioe);
            throw new WebServiceRuntimeException(ioe.getMessage());
        }
    }
    // everything looks OK?
    log.info("Join request has been REJECTED, User {}, Team {}", userId, teamId);
    redirectAttributes.addFlashAttribute(MESSAGE, "Join request has been REJECTED.");
    return "redirect:/approve_new_user";
}

//--------------------------Teams Page--------------------------

/**
 * Public (pre-login) teams listing: fetches all PUBLIC-visibility teams without an auth header
 * and exposes them to the view as "publicTeamMap2".
 *
 * @param model Spring MVC model for the "public_teams" view
 * @return the public_teams view name
 */
@RequestMapping("/public_teams")
public String publicTeamsBeforeLogin(Model model) {
    TeamManager2 teamManager2 = new TeamManager2();
    // get public teams
    HttpEntity<String> teamRequest = createHttpEntityHeaderOnlyNoAuthHeader();
    ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamsByVisibility(TeamVisibility.PUBLIC.toString()), HttpMethod.GET, teamRequest, String.class);
    String teamResponseBody = teamResponse.getBody().toString();

    JSONArray teamPublicJsonArray = new JSONArray(teamResponseBody);
    for (int i = 0; i < teamPublicJsonArray.length(); i++) {
        JSONObject teamInfoObject = teamPublicJsonArray.getJSONObject(i);
        Team2 team2 = extractTeamInfo(teamInfoObject.toString());
        teamManager2.addTeamToPublicTeamMap(team2);
    }
    model.addAttribute("publicTeamMap2", teamManager2.getPublicTeamMap());
    return "public_teams";
}

/**
 * Logged-in user's teams page. For every team id attached to the session user, fetches the team,
 * separates teams with a still-pending join request from approved memberships, and — for approved
 * teams only — loads the team's saved-image lists for display.
 *
 * @param model   receives userEmail, teamMap2, userJoinRequestMap, isInnerImageMapPresent, imageMap
 * @param session HTTP session; must hold the user's "id" attribute
 * @return the teams view name
 */
@RequestMapping("/teams")
public String teams(Model model, HttpSession session) {
    // int currentLoggedInUserId = getSessionIdOfLoggedInUser(session);
    // model.addAttribute("infoMsg", teamManager.getInfoMsg());
    // model.addAttribute("currentLoggedInUserId", currentLoggedInUserId);
    // model.addAttribute("teamMap", teamManager.getTeamMap(currentLoggedInUserId));
    // model.addAttribute("publicTeamMap", teamManager.getPublicTeamMap());
    // model.addAttribute("invitedToParticipateMap2", teamManager.getInvitedToParticipateMap2(currentLoggedInUserId));
    // model.addAttribute("joinRequestMap2", teamManager.getJoinRequestTeamMap2(currentLoggedInUserId));
    TeamManager2 teamManager2 = new TeamManager2();

    // stores the list of images created or in progress of creation by teams
    // e.g. teamNameA : "created" : [imageA, imageB], "inProgress" : [imageC, imageD]
    Map<String, Map<String, List<Image>>> imageMap = new HashMap<>();

    // get list of teamids
    String userId = session.getAttribute("id").toString();
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();
    JSONObject object = new JSONObject(responseBody);
    JSONArray teamIdsJsonArray = object.getJSONArray("teams");
    String userEmail = object.getJSONObject("userDetails").getString("email");

    for (int i = 0; i < teamIdsJsonArray.length(); i++) {
        String teamId = teamIdsJsonArray.get(i).toString();
        HttpEntity<String> teamRequest = createHttpEntityHeaderOnly();
        ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class);
        String teamResponseBody = teamResponse.getBody().toString();
        //Tran: check if team is approved for userId
        Team2 joinRequestTeam = extractTeamInfoUserJoinRequest(userId, teamResponseBody);
        if (joinRequestTeam != null) {
            // membership still pending — show under join requests, skip image retrieval
            teamManager2.addTeamToUserJoinRequestTeamMap(joinRequestTeam);
        } else {
            Team2 team2 = extractTeamInfo(teamResponseBody);
            teamManager2.addTeamToTeamMap(team2);
            imageMap.put(team2.getName(), invokeAndGetImageList(teamId)); //Tran : only retrieve images of approved teams
        }
    }

    // check if inner image map is empty, have to do it via this manner
    // returns true if the team contains an image list
    // NOTE(review): equivalent to anyMatch(m -> !m.isEmpty()) — consider simplifying
    boolean isInnerImageMapPresent = imageMap.values().stream().filter(perTeamImageMap -> !perTeamImageMap.isEmpty()).findFirst().isPresent();

    model.addAttribute("userEmail", userEmail);
    model.addAttribute("teamMap2", teamManager2.getTeamMap());
    model.addAttribute("userJoinRequestMap", teamManager2.getUserJoinRequestMap());
    model.addAttribute("isInnerImageMapPresent", isInnerImageMapPresent);
    model.addAttribute("imageMap", imageMap);
    return "teams";
}

/**
 * Exectues the service-image and returns a Map containing the list of images in two partitions.
 * One partition contains the list of already created images.
 * The other partition contains the list of currently saving in progress images.
 *
 * @param teamId The ncl team id to retrieve the list of images from.
 * @return Returns a Map containing the list of images in two partitions
 *         (keys "created" and "inProgress"); empty map on connection failure or empty image list.
 */
private Map<String, List<Image>> invokeAndGetImageList(String teamId) {
    log.info("Getting list of saved images for team {}", teamId);

    Map<String, List<Image>> resultMap = new HashMap<>();
    List<Image> createdImageList = new ArrayList<>();
    List<Image> inProgressImageList = new ArrayList<>();

    HttpEntity<String> imageRequest = createHttpEntityHeaderOnly();
    ResponseEntity imageResponse;
    try {
        imageResponse = restTemplate.exchange(properties.getTeamSavedImages(teamId), HttpMethod.GET, imageRequest, String.class);
    } catch (ResourceAccessException e) {
        // best-effort: an unreachable image service degrades to "no images" rather than failing the page
        log.warn("Error connecting to image service: {}", e);
        return new HashMap<>();
    }

    String imageResponseBody = imageResponse.getBody().toString();
    // response shape (per the parsing below): { "<teamId>": "{ <imageName>: <status>, ... }" }
    String osImageList = new JSONObject(imageResponseBody).getString(teamId);
    JSONObject osImageObject = new JSONObject(osImageList);
    log.debug("osImageList: {}", osImageList);
    log.debug("osImageObject: {}", osImageObject);

    // NOTE(review): "osImageObject == JSONObject.NULL" is a reference comparison against the JSON
    // null sentinel; a freshly constructed JSONObject can never equal it, so only the length()==0
    // check can fire here — TODO confirm intended guard
    if (osImageObject == JSONObject.NULL || osImageObject.length() == 0) {
        log.info("List of saved images for team {} is empty.", teamId);
        return resultMap;
    }

    for (int k = 0; k < osImageObject.names().length(); k++) {
        String imageName = osImageObject.names().getString(k);
        String imageStatus = osImageObject.getString(imageName);
        log.info("Image list for team {}: image name {}, status {}", teamId, imageName, imageStatus);
        Image image = new Image();
        image.setImageName(imageName);
        image.setDescription("-");
        image.setTeamId(teamId);
        // statuses other than "created"/"notfound" are silently dropped
        if ("created".equals(imageStatus)) {
            createdImageList.add(image);
        } else if ("notfound".equals(imageStatus)) {
            inProgressImageList.add(image);
        }
    }
    resultMap.put("created", createdImageList);
    resultMap.put("inProgress", inProgressImageList);
    return resultMap;
}

// @RequestMapping("/accept_participation/{teamId}")
// public String acceptParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) {
//     int currentLoggedInUserId = getSessionIdOfLoggedInUser(session);
//     // get user's participation request list
//     // add this user id to the requested list
//     teamManager.acceptParticipationRequest(currentLoggedInUserId, teamId);
//     // remove participation request since accepted
//     teamManager.removeParticipationRequest(currentLoggedInUserId, teamId);
//
//     // must get team name
//     String teamName = teamManager.getTeamNameByTeamId(teamId);
//     teamManager.setInfoMsg("You have just joined Team " + teamName + " !");
//
//     return "redirect:/teams";
// }

// @RequestMapping("/ignore_participation/{teamId}")
// public String ignoreParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) {
//     // get user's participation request list
//     // remove this user id from the requested list
//     String teamName = teamManager.getTeamNameByTeamId(teamId);
//     teamManager.ignoreParticipationRequest2(getSessionIdOfLoggedInUser(session), teamId);
//     teamManager.setInfoMsg("You have just ignored a team request from Team " + teamName + " !");
//
//     return "redirect:/teams";
// }
//

/**
 * Withdraws the logged-in user's pending join request for the given team (in-memory
 * teamManager path, unlike the service-backed handlers above) and sets an info message.
 *
 * @param teamId  numeric id of the team whose join request is withdrawn
 * @param session HTTP session of the logged-in user
 * @return redirect to the teams page
 */
@RequestMapping("/withdraw/{teamId}")
public String withdrawnJoinRequest(@PathVariable Integer teamId, HttpSession session) {
    // get user team request
    // remove this user id from the user's request list
    String teamName = teamManager.getTeamNameByTeamId(teamId);
    teamManager.removeUserJoinRequest2(getSessionIdOfLoggedInUser(session), teamId);
    teamManager.setInfoMsg("You have withdrawn your join request for Team " + teamName);
    return "redirect:/teams";
}

// @RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.GET)
// public String inviteMember(@PathVariable Integer teamId, Model model) {
//     model.addAttribute("teamIdVar", teamId);
//     model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm());
//     return "team_page_invite_members";
// }

// @RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.POST)
// public String sendInvitation(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm,Model model) {
//     int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail());
//     teamManager.addInvitedToParticipateMap(userId, teamId);
//     return "redirect:/teams";
// }

/**
 * Members-approval page for a team (in-memory teamManager path).
 *
 * @param teamId numeric id of the team
 * @param model  receives the team under attribute "team"
 * @return the team_page_approve_members view name
 */
@RequestMapping(value = "/teams/members_approval/{teamId}", method = RequestMethod.GET)
public String membersApproval(@PathVariable Integer teamId, Model model) {
    model.addAttribute("team", teamManager.getTeamByTeamId(teamId));
    return "team_page_approve_members";
}

/** Accepts a member's join request via the in-memory teamManager, then returns to the approval page. */
@RequestMapping("/teams/members_approval/accept/{teamId}/{userId}")
public String acceptJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.acceptJoinRequest(userId, teamId);
    return "redirect:/teams/members_approval/{teamId}";
}

/** Rejects a member's join request via the in-memory teamManager, then returns to the approval page. */
@RequestMapping("/teams/members_approval/reject/{teamId}/{userId}")
public String rejectJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.rejectJoinRequest(userId, teamId);
    return "redirect:/teams/members_approval/{teamId}";
}

//--------------------------Team Profile Page--------------------------

/**
 * Team profile page: loads the team, its experiments with realizations, and its quota.
 * Also stashes the fetched team and its budget in the session so the POST handlers below
 * can detect what changed.
 *
 * @param teamId             id of the team to display
 * @param model              receives team, owner, membersList, teamExperimentList,
 *                           teamRealizationMap and teamQuota
 * @param redirectAttributes flash-message carrier used on quota-service failure
 * @param session            receives "originalTeam" and the original budget for later comparison
 * @return the team_profile view, or a redirect on quota-service errors
 * @throws IOException if the quota error response body cannot be parsed
 */
@RequestMapping(value = "/team_profile/{teamId}", method = RequestMethod.GET)
public String teamProfile(@PathVariable String teamId, Model model, final RedirectAttributes redirectAttributes, HttpSession session) throws IOException {
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();

    Team2 team = extractTeamInfo(responseBody);
    model.addAttribute("team", team);
    model.addAttribute("owner", team.getOwner());
    model.addAttribute("membersList", team.getMembersStatusMap().get(MemberStatus.APPROVED));
    session.setAttribute("originalTeam", team);

    // load the team's experiments and the realization of each
    request = createHttpEntityHeaderOnly();
    response = restTemplate.exchange(properties.getExpListByTeamId(teamId), HttpMethod.GET, request, String.class);
    JSONArray experimentsArray = new JSONArray(response.getBody().toString());

    List<Experiment2> experimentList = new ArrayList<>();
    Map<Long, Realization> realizationMap = new HashMap<>();
    for (int k = 0; k < experimentsArray.length(); k++) {
        Experiment2 experiment2 = extractExperiment(experimentsArray.getJSONObject(k).toString());
        Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId());
        realizationMap.put(experiment2.getId(), realization);
        experimentList.add(experiment2);
    }
    model.addAttribute("teamExperimentList", experimentList);
    model.addAttribute("teamRealizationMap", realizationMap);

    //Starting to get quota
    try {
        response = restTemplate.exchange(properties.getQuotaByTeamId(teamId), HttpMethod.GET, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio team service for display team quota: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return REDIRECT_TEAM_PROFILE_TEAM_ID;
    }
    responseBody = response.getBody().toString();

    // handling exceptions from SIO
    if (RestUtil.isError(response.getStatusCode())) {
        MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
        ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
        switch (exceptionState) {
            case TEAM_NOT_FOUND_EXCEPTION:
                log.warn("Get team quota: Team {} not found", teamId);
                return REDIRECT_INDEX_PAGE;
            default:
                // NOTE(review): falls through to render the profile with an error body as quota input — TODO confirm
                log.warn("Get team quota : sio or deterlab adapter connection error");
                redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                break;
        }
    } else {
        log.info("Get team quota info : {}", responseBody);
    }

    TeamQuota teamQuota = extractTeamQuotaInfo(responseBody);
    model.addAttribute("teamQuota", teamQuota);
    session.setAttribute(ORIGINAL_BUDGET, teamQuota.getBudget()); // this is to check if budget changed later
    return "team_profile";
}

/**
 * POST handler for editing a team profile. Currently only the description is user-editable;
 * other fields are resent as-is (with fixed website/privacy values). Compares against the
 * "originalTeam" stored by the GET handler to decide whether to flash a success message.
 *
 * @param teamId             id of the team being edited
 * @param editTeam           bound form object
 * @param redirectAttributes flash carrier for "editDesc" result
 * @param session            holds/clears "originalTeam"
 * @return redirect back to the team profile
 */
@RequestMapping(value = "/team_profile/{teamId}", method = RequestMethod.POST)
public String editTeamProfile(
        @PathVariable String teamId,
        @ModelAttribute("team") Team2 editTeam,
        final RedirectAttributes redirectAttributes,
        HttpSession session) {
    boolean errorsFound = false;
    if (editTeam.getDescription().isEmpty()) {
        errorsFound = true;
        redirectAttributes.addFlashAttribute("editDesc", "fail");
    }
    if (errorsFound) {
        // safer to remove
        session.removeAttribute("originalTeam");
        return REDIRECT_TEAM_PROFILE + editTeam.getId();
    }

    // can edit team description and team website for now
    JSONObject teamfields = new JSONObject();
    teamfields.put("id", teamId);
    teamfields.put("name", editTeam.getName());
    teamfields.put("description", editTeam.getDescription());
    teamfields.put("website", "http://default.com");
    teamfields.put("organisationType", editTeam.getOrganisationType());
    teamfields.put("privacy", "OPEN");
    teamfields.put("status", editTeam.getStatus());
    teamfields.put("members", editTeam.getMembersList());

    HttpEntity<String> request = createHttpEntityWithBody(teamfields.toString());
    // NOTE(review): response status is not checked here; failures are silently ignored — TODO confirm
    ResponseEntity response = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.PUT, request, String.class);

    Team2 originalTeam = (Team2) session.getAttribute("originalTeam");
    if (!originalTeam.getDescription().equals(editTeam.getDescription())) {
        redirectAttributes.addFlashAttribute("editDesc", "success");
    }
    // safer to remove
    session.removeAttribute("originalTeam");
    return REDIRECT_TEAM_PROFILE + teamId;
}

/**
 * POST handler for editing a team's quota (budget). Validates the budget locally
 * (non-negative, <= 99999999.99), submits it to the SIO team service, maps known service
 * errors to flash messages, and flashes success only when the budget actually changed
 * relative to the value stored in the session by the GET handler.
 *
 * @param teamId             id of the team whose quota is edited
 * @param editTeamQuota      bound form object; budget is a String, may be empty
 * @param redirectAttributes flash carrier for EDIT_BUDGET / MESSAGE results
 * @param session            holds/clears ORIGINAL_BUDGET
 * @return redirect back to the team profile's quota anchor
 * @throws IOException if the error response body cannot be parsed
 */
@RequestMapping(value = "/team_quota/{teamId}", method = RequestMethod.POST)
public String editTeamQuota(
        @PathVariable String teamId,
        @ModelAttribute("teamQuota") TeamQuota editTeamQuota,
        final RedirectAttributes redirectAttributes,
        HttpSession session) throws IOException {
    final String QUOTA = "#quota";
    JSONObject teamQuotaJSONObject = new JSONObject();
    teamQuotaJSONObject.put(TEAM_ID, teamId);

    // check if budget is negative or exceeding limit
    if (!editTeamQuota.getBudget().equals("")) {
        if (Double.parseDouble(editTeamQuota.getBudget()) < 0) {
            redirectAttributes.addFlashAttribute(EDIT_BUDGET, "negativeError");
            return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
        } else if(Double.parseDouble(editTeamQuota.getBudget()) > 99999999.99) {
            redirectAttributes.addFlashAttribute(EDIT_BUDGET, "exceedingLimit");
            return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
        }
    }

    teamQuotaJSONObject.put("quota", editTeamQuota.getBudget());
    HttpEntity<String> request = createHttpEntityWithBody(teamQuotaJSONObject.toString());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.getQuotaByTeamId(teamId), HttpMethod.PUT, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio team service for display team quota: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return REDIRECT_TEAM_PROFILE_TEAM_ID;
    }
    String responseBody = response.getBody().toString();

    // handling exceptions from SIO
    if (RestUtil.isError(response.getStatusCode())) {
        MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
        ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
        switch (exceptionState) {
            case TEAM_NOT_FOUND_EXCEPTION:
                log.warn("Get team quota: Team {} not found", teamId);
                return REDIRECT_INDEX_PAGE;
            case TEAM_QUOTA_OUT_OF_RANGE_EXCEPTION:
                log.warn("Get team quota: Budget is out of range");
                return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
            case FORBIDDEN_EXCEPTION:
                log.warn("Get team quota: Budget can only be updated by team owner.");
                redirectAttributes.addFlashAttribute(EDIT_BUDGET, "editDeny");
                return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
            default:
                log.warn("Get team quota : sio or deterlab adapter connection error");
                redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
        }
    } else {
        log.info("Edit team quota info : {}", responseBody);
    }

    //check if new budget is different in order to display successful message to user
    String originalBudget = (String) session.getAttribute(ORIGINAL_BUDGET);
    if (!originalBudget.equals(editTeamQuota.getBudget())) {
        redirectAttributes.addFlashAttribute(EDIT_BUDGET, "success");
    }
    // safer to remove
    session.removeAttribute(ORIGINAL_BUDGET);
    return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
}

/**
 * Removes an APPROVED member from a team via the SIO team service (HTTP DELETE).
 * Distinguishes two DeterLab failure sub-cases by the error message: the member still has
 * experiments (which must be terminated first) vs. a generic operation failure.
 *
 * @param teamId             id of the team
 * @param userId             id of the member to remove
 * @param redirectAttributes flash carrier for success/error messages and, in the
 *                           has-experiments case, the member's uid and display name
 * @return redirect back to the team profile in all outcomes
 * @throws IOException if the error response body cannot be parsed
 */
@RequestMapping("/remove_member/{teamId}/{userId}")
public String removeMember(@PathVariable String teamId, @PathVariable String userId, final RedirectAttributes redirectAttributes) throws IOException {
    JSONObject teamMemberFields = new JSONObject();
    teamMemberFields.put("userId", userId);
    teamMemberFields.put(MEMBER_TYPE, MemberType.MEMBER.name());
    teamMemberFields.put("memberStatus", MemberStatus.APPROVED.name());
    HttpEntity<String> request = createHttpEntityWithBody(teamMemberFields.toString());

    // NOTE(review): mutating the shared restTemplate's error handler per-request is not
    // thread-safe if the template is a shared bean — TODO confirm template scope
    restTemplate.setErrorHandler(new MyResponseErrorHandler());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.removeUserFromTeam(teamId), HttpMethod.DELETE, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio team service for remove user: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return REDIRECT_TEAM_PROFILE_TEAM_ID;
    }

    String responseBody = response.getBody().toString();
    User2 user = invokeAndExtractUserInfo(userId);
    String name = user.getFirstName() + " " + user.getLastName();

    if (RestUtil.isError(response.getStatusCode())) {
        MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
        ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
        switch (exceptionState) {
            case DETERLAB_OPERATION_FAILED_EXCEPTION:
                // two subcases when fail to remove users from team
                log.warn("Remove member from team: User {}, Team {} fail - {}", userId, teamId, error.getMessage());
                if ("user has experiments".equals(error.getMessage())) {
                    // case 1 - user has experiments
                    // display the list of experiments that have to be terminated first
                    // since the team profile page has experiments already, we don't have to retrieve them again
                    // use the userid to filter out the experiment list at the web pages
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " Member " + name + " has experiments.");
                    redirectAttributes.addFlashAttribute(REMOVE_MEMBER_UID, userId);
                    redirectAttributes.addFlashAttribute(REMOVE_MEMBER_NAME, name);
                    break;
                } else {
                    // case 2 - deterlab operation failure
                    log.warn("Remove member from team: deterlab operation failed");
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " Member " + name + " cannot be removed.");
                    break;
                }
            default:
                log.warn("Server side error for remove members: {}", error.getError());
                redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                break;
        }
    } else {
        log.info("Remove member: {}", response.getBody().toString());
        // add success message
        redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Member " + name + " has been removed.");
    }
    return REDIRECT_TEAM_PROFILE_TEAM_ID;
}

// @RequestMapping("/team_profile/{teamId}/start_experiment/{expId}")
// public String startExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
//     // start experiment
//     // ensure experiment is stopped first before starting
//     experimentManager.startExperiment(getSessionIdOfLoggedInUser(session), expId);
//     return "redirect:/team_profile/{teamId}";
// }

// @RequestMapping("/team_profile/{teamId}/stop_experiment/{expId}")
// public String stopExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
//     // stop experiment
//     // ensure experiment is in ready mode before stopping
//     experimentManager.stopExperiment(getSessionIdOfLoggedInUser(session), expId);
//     return "redirect:/team_profile/{teamId}";
// }

// @RequestMapping("/team_profile/{teamId}/remove_experiment/{expId}")
// public String removeExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
//     // remove experiment
//     // TODO check userid is indeed the experiment owner or team owner
//     // ensure experiment is stopped first
//     if (experimentManager.removeExperiment(getSessionIdOfLoggedInUser(session), expId) == true) {
//         // decrease exp count to be display on Teams page
//         teamManager.decrementExperimentCount(teamId);
//     }
//     model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
//     return "redirect:/team_profile/{teamId}";
// }

// @RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.GET)
// public String inviteUserFromTeamProfile(@PathVariable Integer teamId, Model model) {
//     model.addAttribute("teamIdVar", teamId);
//     model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm());
//     return "team_profile_invite_members";
// }

// @RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.POST)
// public String sendInvitationFromTeamProfile(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm, Model model) {
//     int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail());
//     teamManager.addInvitedToParticipateMap(userId, teamId);
//     return "redirect:/team_profile/{teamId}";
// }
//--------------------------Apply for New Team Page--------------------------

/**
 * GET form for applying for a new team.
 *
 * @param model receives an empty TeamPageApplyTeamForm
 * @return the team_page_apply_team view name
 */
@RequestMapping(value = "/teams/apply_team", method = RequestMethod.GET)
public String teamPageApplyTeam(Model model) {
    model.addAttribute("teamPageApplyTeamForm", new TeamPageApplyTeamForm());
    return "team_page_apply_team";
}

/**
 * POST handler for an existing user applying to create a new team. Validates the bound form,
 * posts the team application to the SIO registration service, and maps known service errors
 * to user-facing flash messages via an EnumMap lookup.
 *
 * @param teamPageApplyTeamForm validated application form
 * @param bindingResult         validation result for the form
 * @param session               HTTP session; must hold the user's "id" attribute
 * @param redirectAttributes    flash carrier for the error "message"
 * @return redirect to the submitted page on success, back to the form on error
 * @throws WebServiceRuntimeException on connectivity or response-parsing failures
 */
@RequestMapping(value = "/teams/apply_team", method = RequestMethod.POST)
public String checkApplyTeamInfo(
        @Valid TeamPageApplyTeamForm teamPageApplyTeamForm,
        BindingResult bindingResult,
        HttpSession session,
        final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    final String LOG_PREFIX = "Existing user apply for new team: {}";
    if (bindingResult.hasErrors()) {
        log.warn(LOG_PREFIX, "Application form error " + teamPageApplyTeamForm.toString());
        return "team_page_apply_team";
    }
    // log data to ensure data has been parsed
    log.debug(LOG_PREFIX, properties.getRegisterRequestToApplyTeam(session.getAttribute("id").toString()));
    log.info(LOG_PREFIX, teamPageApplyTeamForm.toString());

    // build the application payload: { "team": { ...form fields... } }
    JSONObject mainObject = new JSONObject();
    JSONObject teamFields = new JSONObject();
    mainObject.put("team", teamFields);
    teamFields.put("name", teamPageApplyTeamForm.getTeamName());
    teamFields.put("description", teamPageApplyTeamForm.getTeamDescription());
    teamFields.put("website", teamPageApplyTeamForm.getTeamWebsite());
    teamFields.put("organisationType", teamPageApplyTeamForm.getTeamOrganizationType());
    teamFields.put("visibility", teamPageApplyTeamForm.getIsPublic());

    String nclUserId = session.getAttribute("id").toString();
    HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.getRegisterRequestToApplyTeam(nclUserId), HttpMethod.POST, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            // prepare the exception mapping
            EnumMap<ExceptionState, String> exceptionMessageMap = new EnumMap<>(ExceptionState.class);
            exceptionMessageMap.put(USER_ID_NULL_OR_EMPTY_EXCEPTION, "User id is null or empty ");
            exceptionMessageMap.put(TEAM_NAME_NULL_OR_EMPTY_EXCEPTION, "Team name is null or empty ");
            exceptionMessageMap.put(USER_NOT_FOUND_EXCEPTION, "User not found");
            exceptionMessageMap.put(TEAM_NAME_ALREADY_EXISTS_EXCEPTION, "Team name already exists");
            exceptionMessageMap.put(INVALID_TEAM_NAME_EXCEPTION, "Team name contains invalid characters");
            exceptionMessageMap.put(TEAM_MEMBER_ALREADY_EXISTS_EXCEPTION, "Team member already exists");
            exceptionMessageMap.put(ADAPTER_CONNECTION_EXCEPTION, "Connection to adapter failed");
            exceptionMessageMap.put(ADAPTER_INTERNAL_ERROR_EXCEPTION, "Internal server error on adapter");
            exceptionMessageMap.put(DETERLAB_OPERATION_FAILED_EXCEPTION, "Operation failed on DeterLab");

            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            // NOTE(review): a mapped state surfaces the service's message, not the map value;
            // the map is effectively a known-state whitelist — TODO confirm intent
            final String errorMessage = exceptionMessageMap.containsKey(exceptionState) ? error.getMessage() : ERR_SERVER_OVERLOAD;

            log.warn(LOG_PREFIX, responseBody);
            redirectAttributes.addFlashAttribute("message", errorMessage);
            return "redirect:/teams/apply_team";
        } else {
            // no errors, everything ok
            log.info(LOG_PREFIX, "Application for team " + teamPageApplyTeamForm.getTeamName() + " submitted");
            return "redirect:/teams/team_application_submitted";
        }
    } catch (ResourceAccessException | IOException e) {
        log.error(LOG_PREFIX, e);
        throw new WebServiceRuntimeException(e.getMessage());
    }
}

/** Static page: acceptable usage policy. */
@RequestMapping(value = "/acceptable_usage_policy", method = RequestMethod.GET)
public String teamOwnerPolicy() {
    return "acceptable_usage_policy";
}

/** Static page: terms and conditions. */
@RequestMapping(value = "/terms_and_conditions", method = RequestMethod.GET)
public String termsAndConditions() {
    return "terms_and_conditions";
}

//--------------------------Join Team Page--------------------------

/**
 * GET form for joining an existing team.
 *
 * @param model receives an empty TeamPageJoinTeamForm
 * @return the team_page_join_team view name
 */
@RequestMapping(value = "/teams/join_team", method = RequestMethod.GET)
public String teamPageJoinTeam(Model model) {
    model.addAttribute("teamPageJoinTeamForm", new TeamPageJoinTeamForm());
    return "team_page_join_team";
}

/**
 * POST handler for an existing user requesting to join a team by name. Posts the join request
 * to the SIO team service and maps known service errors to flash messages.
 *
 * @param teamPageJoinForm   validated join form (team name)
 * @param bindingResult      validation result for the form
 * @param model              unused here; kept for signature compatibility
 * @param session            HTTP session; must hold the user's "id" attribute
 * @param redirectAttributes flash carrier for the error "message"
 * @return redirect to the join-submitted page on success, back to the form on error
 * @throws WebServiceRuntimeException on connectivity or response-parsing failures
 */
@RequestMapping(value = "/teams/join_team", method = RequestMethod.POST)
public String checkJoinTeamInfo(
        @Valid TeamPageJoinTeamForm teamPageJoinForm,
        BindingResult bindingResult,
        Model model,
        HttpSession session,
        final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    final String LOG_PREFIX = "Existing user join team: {}";
    if (bindingResult.hasErrors()) {
        log.warn(LOG_PREFIX, "Application form error " + teamPageJoinForm.toString());
        return "team_page_join_team";
    }

    // payload: { "team": { "name": ... }, "user": { "id": ... } }
    JSONObject mainObject = new JSONObject();
    JSONObject teamFields = new JSONObject();
    JSONObject userFields = new JSONObject();
    mainObject.put("team", teamFields);
    mainObject.put("user", userFields);
    userFields.put("id", session.getAttribute("id")); // ncl-id
    teamFields.put("name", teamPageJoinForm.getTeamName());

    log.info(LOG_PREFIX, "User " + session.getAttribute("id") + ", team " + teamPageJoinForm.getTeamName());

    HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString());
    ResponseEntity response;
    try {
        restTemplate.setErrorHandler(new MyResponseErrorHandler());
        response = restTemplate.exchange(properties.getJoinRequestExistingUser(), HttpMethod.POST, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            // prepare the exception mapping
            EnumMap<ExceptionState, String> exceptionMessageMap = new EnumMap<>(ExceptionState.class);
            exceptionMessageMap.put(USER_NOT_FOUND_EXCEPTION, "User not found");
            exceptionMessageMap.put(USER_ID_NULL_OR_EMPTY_EXCEPTION, "User id is null or empty");
            exceptionMessageMap.put(TEAM_NOT_FOUND_EXCEPTION, "Team name not found");
            exceptionMessageMap.put(TEAM_NAME_NULL_OR_EMPTY_EXCEPTION, "Team name is null or empty");
            exceptionMessageMap.put(USER_ALREADY_IN_TEAM_EXCEPTION, "User already in team");
            exceptionMessageMap.put(TEAM_MEMBER_ALREADY_EXISTS_EXCEPTION, "Team member already exists");
            exceptionMessageMap.put(ADAPTER_CONNECTION_EXCEPTION, "Connection to adapter failed");
            exceptionMessageMap.put(ADAPTER_INTERNAL_ERROR_EXCEPTION, "Internal server error on adapter");
            exceptionMessageMap.put(DETERLAB_OPERATION_FAILED_EXCEPTION, "Operation failed on DeterLab");

            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            final String errorMessage = exceptionMessageMap.containsKey(exceptionState) ? error.getMessage() : ERR_SERVER_OVERLOAD;

            log.warn(LOG_PREFIX, responseBody);
            redirectAttributes.addFlashAttribute("message", errorMessage);
            return "redirect:/teams/join_team";
        } else {
            log.info(LOG_PREFIX, "Application for join team " + teamPageJoinForm.getTeamName() + " submitted");
            return "redirect:/teams/join_application_submitted/" + teamPageJoinForm.getTeamName();
        }
    } catch (ResourceAccessException | IOException e) {
        throw new WebServiceRuntimeException(e.getMessage());
    }
}

//--------------------------Experiment Page--------------------------

/**
 * Experiments overview: resolves the user's DeterLab uid, then collects the experiments
 * (and their realizations) of every team the user is an approved member of (teams with a
 * pending join request are skipped).
 *
 * @param model   receives DETER_UID, experimentList and realizationMap
 * @param session HTTP session; must hold the user's "id" attribute
 * @return the experiments view name
 * @throws WebServiceRuntimeException if the deter-uid error response cannot be parsed
 */
@RequestMapping(value = "/experiments", method = RequestMethod.GET)
public String experiments(Model model, HttpSession session) throws WebServiceRuntimeException {
    // long start = System.currentTimeMillis();
    List<Experiment2> experimentList = new ArrayList<>();
    Map<Long, Realization> realizationMap = new HashMap<>();

    HttpEntity<String> request = createHttpEntityHeaderOnly();
    restTemplate.setErrorHandler(new MyResponseErrorHandler());

    // resolve the user's DeterLab uid for display; fall back to CONNECTION_ERROR on failure
    ResponseEntity response = restTemplate.exchange(properties.getDeterUid(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();
    try {
        if (RestUtil.isError(response.getStatusCode())) {
            log.error("No user to get experiment: {}", session.getAttribute("id"));
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            log.info("experiment error: {} - {} - {} - user token:{}", error.getError(), error.getMessage(), error.getLocalizedMessage(), httpScopedSession.getAttribute(webProperties.getSessionJwtToken()));
            model.addAttribute(DETER_UID, CONNECTION_ERROR);
        } else {
            log.info("Show the deter user id: {}", responseBody);
            model.addAttribute(DETER_UID, responseBody);
        }
    } catch (IOException e) {
        throw new WebServiceRuntimeException(e.getMessage());
    }

    // get list of teamids
    ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class);
    JSONObject object = new JSONObject(userRespEntity.getBody().toString());
    JSONArray teamIdsJsonArray = object.getJSONArray("teams");

    for (int i = 0; i < teamIdsJsonArray.length(); i++) {
        String teamId = teamIdsJsonArray.get(i).toString();
        HttpEntity<String> teamRequest = createHttpEntityHeaderOnly();
        ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class);
        String teamResponseBody = teamResponse.getBody().toString();
        if (!isMemberJoinRequestPending(session.getAttribute("id").toString(), teamResponseBody)) {
            // get experiments lists of the teams
            HttpEntity<String> expRequest = createHttpEntityHeaderOnly();
            ResponseEntity expRespEntity = restTemplate.exchange(properties.getExpListByTeamId(teamId), HttpMethod.GET, expRequest, String.class);
            JSONArray experimentsArray = new JSONArray(expRespEntity.getBody().toString());
            for (int k = 0; k < experimentsArray.length(); k++) {
                Experiment2 experiment2 = extractExperiment(experimentsArray.getJSONObject(k).toString());
                Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId());
                realizationMap.put(experiment2.getId(), realization);
                experimentList.add(experiment2);
            }
        }
    }

    model.addAttribute("experimentList", experimentList);
    model.addAttribute("realizationMap", realizationMap);
    // System.out.println("Elapsed time to get experiment page:" + (System.currentTimeMillis() - start));
    return EXPERIMENTS;
}

/**
 * Experiment profile page. Loads the experiment and its realization, enforces that the viewer
 * is an admin or a member of the owning team, then loads the experiment's owner and detail blob.
 *
 * @param expId              id of the experiment to display
 * @param model              receives experiment, realization, experimentOwner, experimentDetails
 * @param session            used for the admin/team membership check
 * @param redirectAttributes flash carrier for the permission-denied message
 * @return the experiment_profile view, or a redirect to /experiments when access is denied
 */
@GetMapping(value = "/experiment_profile/{expId}")
public String experimentProfile(@PathVariable String expId, Model model, HttpSession session, RedirectAttributes redirectAttributes) {
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getExperiment(expId), HttpMethod.GET, request, String.class);

    log.info("experiment profile: extract experiment");
    Experiment2 experiment2 = extractExperiment(response.getBody().toString());

    log.info("experiment profile: extract realization");
    Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId());

    // access control: only admins or members of the owning team may view the profile
    if (isNotAdminAndNotInTeam(session, realization)) {
        log.warn("Permission denied to view experiment profile: {} for team: {}", realization.getExperimentName(), experiment2.getTeamName());
        redirectAttributes.addFlashAttribute(MESSAGE, permissionDeniedMessage);
        return "redirect:/experiments";
    }

    User2 experimentOwner = invokeAndExtractUserInfo(experiment2.getUserId());

    // get experiment details
    // returns a json string in the format:
    // {
    //   'ns_file' :
    //   {
    //     'msg' : 'success/fail',
    //     'ns_file' : 'ns_file_contents'
    //   },
    //   'realization_details' :
    //   {
    //     'msg' : 'success/fail',
    //     'realization_details' : 'realization_details_contents'
    //   },
    //   'activity_log' :
    //   {
    //     'msg' : 'success/fail',
    //     'activity_log' : 'activity_log_contents'
    //   }
    // }
    // returns a '{}' otherwise if fail
    ResponseEntity expDetailsResponse = restTemplate.exchange(properties.getExperimentDetails(experiment2.getTeamId(), expId), HttpMethod.GET, request, String.class);
    log.debug("experiment profile - experiment details: {}", expDetailsResponse.getBody().toString());

    model.addAttribute("experiment", experiment2);
    model.addAttribute("realization", realization);
    model.addAttribute("experimentOwner", experimentOwner.getFirstName() + ' ' + experimentOwner.getLastName());
    model.addAttribute("experimentDetails", new JSONObject(expDetailsResponse.getBody().toString()));
    return "experiment_profile";
}

/**
 * GET form for creating an experiment: loads the scenario file names and the list of teams
 * the logged-in user belongs to (for the team selector).
 *
 * @param model   receives scenarioFileNameList, experimentForm, userTeamsList
 * @param session HTTP session; must hold the user's "id" attribute
 * @return the experiment_page_create_experiment view name
 * @throws WebServiceRuntimeException declared for symmetry with the POST handler — TODO confirm
 */
@RequestMapping(value = "/experiments/create", method = RequestMethod.GET)
public String createExperiment(Model model, HttpSession session) throws WebServiceRuntimeException {
    log.info("Loading create experiment page");
    // a list of teams that the logged in user is in
    List<String> scenarioFileNameList = getScenarioFileNameList();
    List<Team2> userTeamsList = new ArrayList<>();

    // get list of teamids
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();
    JSONObject object = new JSONObject(responseBody);
    JSONArray teamIdsJsonArray = object.getJSONArray("teams");

    for (int i = 0; i < teamIdsJsonArray.length(); i++) {
        String teamId = teamIdsJsonArray.get(i).toString();
        HttpEntity<String> teamRequest = createHttpEntityHeaderOnly();
        ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class);
        String teamResponseBody = teamResponse.getBody().toString();
        Team2 team2 = extractTeamInfo(teamResponseBody);
        userTeamsList.add(team2);
    }

    model.addAttribute("scenarioFileNameList", scenarioFileNameList);
    model.addAttribute("experimentForm", new ExperimentForm());
    model.addAttribute("userTeamsList", userTeamsList);
    return "experiment_page_create_experiment";
}

/**
 * POST handler for creating an experiment. Validates the form, builds the experiment payload
 * (fixed idleSwap/maxDuration values, whitespace-stripped name) and submits it to the SIO
 * experiment service, mapping known errors to flash messages.
 * (Method continues beyond this chunk.)
 */
@RequestMapping(value = "/experiments/create", method = RequestMethod.POST)
public String validateExperiment(
        @ModelAttribute("experimentForm") ExperimentForm experimentForm,
        HttpSession session,
        BindingResult bindingResult,
        final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    if (bindingResult.hasErrors()) {
        log.info("Create experiment - form has errors");
        return "redirect:/experiments/create";
    }
    if (experimentForm.getName() == null || experimentForm.getName().isEmpty()) {
        redirectAttributes.addFlashAttribute(MESSAGE, "Experiment Name cannot be empty");
        return "redirect:/experiments/create";
    }
    if (experimentForm.getDescription() == null || experimentForm.getDescription().isEmpty()) {
        redirectAttributes.addFlashAttribute(MESSAGE, "Description cannot be empty");
        return "redirect:/experiments/create";
    }

    experimentForm.setScenarioContents(getScenarioContentsFromFile(experimentForm.getScenarioFileName()));

    JSONObject experimentObject = new JSONObject();
    experimentObject.put("userId", session.getAttribute("id").toString());
    experimentObject.put(TEAM_ID, experimentForm.getTeamId());
    experimentObject.put(TEAM_NAME, experimentForm.getTeamName());
    experimentObject.put("name", experimentForm.getName().replaceAll("\\s+", "")); // truncate whitespaces and non-visible characters like \n
    experimentObject.put("description", experimentForm.getDescription());
    experimentObject.put("nsFile", "file");
    experimentObject.put("nsFileContent", experimentForm.getNsFileContent());
    experimentObject.put("idleSwap", "240");
    experimentObject.put("maxDuration", "960");

    log.info("Calling service to create experiment");
    HttpEntity<String> request = createHttpEntityWithBody(experimentObject.toString());
    restTemplate.setErrorHandler(new MyResponseErrorHandler());
    ResponseEntity response = restTemplate.exchange(properties.getSioExpUrl(), HttpMethod.POST, request, String.class);
    String responseBody = response.getBody().toString();

    try {
        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case NS_FILE_PARSE_EXCEPTION:
                    log.warn("Ns file error");
                    redirectAttributes.addFlashAttribute(MESSAGE, "There is an error when parsing the NS File.");
                    break;
                case EXPERIMENT_NAME_ALREADY_EXISTS_EXCEPTION:
                    log.warn("Exp name already exists");
                    redirectAttributes.addFlashAttribute(MESSAGE, "Experiment name already exists.");
                    break;
                default:
                    log.warn("Exp service or adapter fail");
                    // possible sio or adapter connection fail
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            // NOTE(review): "created" is logged on the ERROR path — message looks misleading; TODO confirm
            log.info("Experiment {} created", experimentForm);
            return "redirect:/experiments/create";
        }
    } catch (IOException e) {
        throw new WebServiceRuntimeException(e.getMessage());
    }
    //
    // TODO Uploaded function for network configuration and optional dataset
    // if (!networkFile.isEmpty()) {
// try { // String networkFileName = getSessionIdOfLoggedInUser(session) + "-networkconfig-" + networkFile.getOriginalFilename(); // BufferedOutputStream stream = new BufferedOutputStream( // new FileOutputStream(new File(App.EXP_CONFIG_DIR + "/" + networkFileName))); // FileCopyUtils.copy(networkFile.getInputStream(), stream); // stream.close(); // redirectAttributes.addFlashAttribute(MESSAGE, // "You successfully uploaded " + networkFile.getOriginalFilename() + "!"); // // remember network file name here // } // catch (Exception e) { // redirectAttributes.addFlashAttribute(MESSAGE, // "You failed to upload " + networkFile.getOriginalFilename() + " => " + e.getMessage()); // return "redirect:/experiments/create"; // } // } // // if (!dataFile.isEmpty()) { // try { // String dataFileName = getSessionIdOfLoggedInUser(session) + "-data-" + dataFile.getOriginalFilename(); // BufferedOutputStream stream = new BufferedOutputStream( // new FileOutputStream(new File(App.EXP_CONFIG_DIR + "/" + dataFileName))); // FileCopyUtils.copy(dataFile.getInputStream(), stream); // stream.close(); // redirectAttributes.addFlashAttribute("message2", // "You successfully uploaded " + dataFile.getOriginalFilename() + "!"); // // remember data file name here // } // catch (Exception e) { // redirectAttributes.addFlashAttribute("message2", // "You failed to upload " + dataFile.getOriginalFilename() + " => " + e.getMessage()); // } // } // // // add current experiment to experiment manager // experimentManager.addExperiment(getSessionIdOfLoggedInUser(session), experiment); // // increase exp count to be display on Teams page // teamManager.incrementExperimentCount(experiment.getTeamId()); return "redirect:/experiments"; } @RequestMapping(value = "/experiments/save_image/{teamId}/{expId}/{nodeId}", method = RequestMethod.GET) public String saveExperimentImage(@PathVariable String teamId, @PathVariable String expId, @PathVariable String nodeId, Model model) { Map<String, Map<String, String>> 
singleNodeInfoMap = new HashMap<>(); Image saveImageForm = new Image(); String teamName = invokeAndExtractTeamInfo(teamId).getName(); Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); // experiment may have many nodes // extract just the particular node details to display for (Map.Entry<String, Map<String, String>> nodesInfo : realization.getNodesInfoMap().entrySet()) { String nodeName = nodesInfo.getKey(); Map<String, String> singleNodeDetailsMap = nodesInfo.getValue(); if (singleNodeDetailsMap.get(NODE_ID).equals(nodeId)) { singleNodeInfoMap.put(nodeName, singleNodeDetailsMap); // store the current os of the node into the form also // have to pass the the services saveImageForm.setCurrentOS(singleNodeDetailsMap.get("os")); } } saveImageForm.setTeamId(teamId); saveImageForm.setNodeId(nodeId); model.addAttribute("teamName", teamName); model.addAttribute("singleNodeInfoMap", singleNodeInfoMap); model.addAttribute("pathTeamId", teamId); model.addAttribute("pathExperimentId", expId); model.addAttribute("pathNodeId", nodeId); model.addAttribute("experimentName", realization.getExperimentName()); model.addAttribute("saveImageForm", saveImageForm); return "save_experiment_image"; } // bindingResult is required in the method signature to perform the JSR303 validation for Image object @RequestMapping(value = "/experiments/save_image/{teamId}/{expId}/{nodeId}", method = RequestMethod.POST) public String saveExperimentImage( @Valid @ModelAttribute("saveImageForm") Image saveImageForm, BindingResult bindingResult, RedirectAttributes redirectAttributes, @PathVariable String teamId, @PathVariable String expId, @PathVariable String nodeId) throws IOException { if (saveImageForm.getImageName().length() < 2) { log.warn("Save image form has errors {}", saveImageForm); redirectAttributes.addFlashAttribute("message", "Image name too short, minimum 2 characters"); return "redirect:/experiments/save_image/" + teamId + "/" + expId + "/" + nodeId; } 
log.info("Saving image: team {}, experiment {}, node {}", teamId, expId, nodeId); ObjectMapper mapper = new ObjectMapper(); HttpEntity<String> request = createHttpEntityWithBody(mapper.writeValueAsString(saveImageForm)); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.saveImage(), HttpMethod.POST, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); log.warn("Save image: error with exception {}", exceptionState); switch (exceptionState) { case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Save image: error, operation failed on DeterLab"); redirectAttributes.addFlashAttribute("message", error.getMessage()); break; case ADAPTER_CONNECTION_EXCEPTION: log.warn("Save image: error, cannot connect to adapter"); redirectAttributes.addFlashAttribute("message", "connection to adapter failed"); break; case ADAPTER_INTERNAL_ERROR_EXCEPTION: log.warn("Save image: error, adapter internal server error"); redirectAttributes.addFlashAttribute("message", "internal error was found on the adapter"); break; default: log.warn("Save image: other error"); redirectAttributes.addFlashAttribute("message", ERR_SERVER_OVERLOAD); } return "redirect:/experiments/save_image/" + teamId + "/" + expId + "/" + nodeId; } // everything looks ok log.info("Save image in progress: team {}, experiment {}, node {}, image {}", teamId, expId, nodeId, saveImageForm.getImageName()); return "redirect:/experiments"; } /* private String processSaveImageRequest(@Valid @ModelAttribute("saveImageForm") Image saveImageForm, RedirectAttributes redirectAttributes, @PathVariable String teamId, @PathVariable String expId, @PathVariable String nodeId, ResponseEntity response, String responseBody) throws 
IOException { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); log.warn("Save image exception: {}", exceptionState); switch (exceptionState) { case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("adapter deterlab operation failed exception"); redirectAttributes.addFlashAttribute("message", error.getMessage()); break; default: log.warn("Image service or adapter fail"); // possible sio or adapter connection fail redirectAttributes.addFlashAttribute("message", ERR_SERVER_OVERLOAD); break; } return "redirect:/experiments/save_image/" + teamId + "/" + expId + "/" + nodeId; } else { // everything ok log.info("Image service in progress for Team: {}, Exp: {}, Node: {}, Image: {}", teamId, expId, nodeId, saveImageForm.getImageName()); return "redirect:/experiments"; } } */ // @RequestMapping("/experiments/configuration/{expId}") // public String viewExperimentConfiguration(@PathVariable Integer expId, Model model) { // // get experiment from expid // // retrieve the scenario contents to be displayed // Experiment currExp = experimentManager.getExperimentByExpId(expId); // model.addAttribute("scenarioContents", currExp.getScenarioContents()); // return "experiment_scenario_contents"; // } @RequestMapping("/remove_experiment/{teamName}/{teamId}/{expId}") public String removeExperiment(@PathVariable String teamName, @PathVariable String teamId, @PathVariable String expId, final RedirectAttributes redirectAttributes, HttpSession session) throws WebServiceRuntimeException { // ensure experiment is stopped first Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); Team2 team = invokeAndExtractTeamInfo(teamId); // check valid authentication to remove experiments // either admin, experiment creator or experiment owner if (!validateIfAdmin(session) && 
!realization.getUserId().equals(session.getAttribute("id").toString()) && !team.getOwner().getId().equals(session.getAttribute(webProperties.getSessionUserId()))) { log.warn("Permission denied when remove Team:{}, Experiment: {} with User: {}, Role:{}", teamId, expId, session.getAttribute("id"), session.getAttribute(webProperties.getSessionRoles())); redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to remove experiment;" + permissionDeniedMessage); return "redirect:/experiments"; } if (!realization.getState().equals(RealizationState.NOT_RUNNING.toString())) { log.warn("Trying to remove Team: {}, Experiment: {} with State: {} that is still in progress?", teamId, expId, realization.getState()); redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to remove Exp: " + realization.getExperimentName() + ". Please refresh the page again. If the error persists, please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } log.info("Removing experiment: at " + properties.getDeleteExperiment(teamId, expId)); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response; try { response = restTemplate.exchange(properties.getDeleteExperiment(teamId, expId), HttpMethod.DELETE, request, String.class); } catch (Exception e) { log.warn("Error connecting to experiment service to remove experiment", e.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/experiments"; } String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case EXPERIMENT_DELETE_EXCEPTION: case FORBIDDEN_EXCEPTION: log.warn("remove experiment failed for Team: {}, Exp: {}", teamId, 
expId); redirectAttributes.addFlashAttribute(MESSAGE, error.getMessage()); break; case OBJECT_OPTIMISTIC_LOCKING_FAILURE_EXCEPTION: // do nothing log.info("remove experiment database locking failure"); break; default: // do nothing break; } return "redirect:/experiments"; } else { // everything ok log.info("remove experiment success for Team: {}, Exp: {}", teamId, expId); redirectAttributes.addFlashAttribute("exp_remove_message", "Team: " + teamName + " has removed Exp: " + realization.getExperimentName()); return "redirect:/experiments"; } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } @RequestMapping("/start_experiment/{teamName}/{expId}") public String startExperiment( @PathVariable String teamName, @PathVariable String expId, final RedirectAttributes redirectAttributes, Model model, HttpSession session) throws WebServiceRuntimeException { // ensure experiment is stopped first before starting Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); if (!checkPermissionRealizeExperiment(realization, session)) { log.warn("Permission denied to start experiment: {} for team: {}", realization.getExperimentName(), teamName); redirectAttributes.addFlashAttribute(MESSAGE, permissionDeniedMessage); return "redirect:/experiments"; } String teamId = realization.getTeamId(); String teamStatus = getTeamStatus(teamId); if (!teamStatus.equals(TeamStatus.APPROVED.name())) { log.warn("Error: trying to realize an experiment {} on team {} with status {}", realization.getExperimentName(), teamId, teamStatus); redirectAttributes.addFlashAttribute(MESSAGE, teamName + " is in " + teamStatus + " status and does not have permission to start experiment. 
Please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } if (!realization.getState().equals(RealizationState.NOT_RUNNING.toString())) { log.warn("Trying to start Team: {}, Experiment: {} with State: {} that is not running?", teamName, expId, realization.getState()); redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to start Exp: " + realization.getExperimentName() + ". Please refresh the page again. If the error persists, please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } //start experiment log.info("Starting experiment: at " + properties.getStartExperiment(teamName, expId)); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response; try { response = restTemplate.exchange(properties.getStartExperiment(teamName, expId), HttpMethod.POST, request, String.class); } catch (Exception e) { log.warn("Error connecting to experiment service to start experiment", e.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/experiments"; } String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case EXPERIMENT_START_EXCEPTION: case FORBIDDEN_EXCEPTION: log.warn("start experiment failed for Team: {}, Exp: {}", teamName, expId); redirectAttributes.addFlashAttribute(MESSAGE, error.getMessage()); return "redirect:/experiments"; case TEAM_NOT_FOUND_EXCEPTION: log.warn("Check team quota to start experiment: Team {} not found", teamName); return REDIRECT_INDEX_PAGE; case INSUFFICIENT_QUOTA_EXCEPTION: log.warn("Check team quota to start experiment: Team {} do not have sufficient quota", teamName); redirectAttributes.addFlashAttribute(MESSAGE, "There is 
insufficient quota for you to start this experiment. Please contact your team leader for more details."); return "redirect:/experiments"; case OBJECT_OPTIMISTIC_LOCKING_FAILURE_EXCEPTION: // do nothing log.info("start experiment database locking failure"); break; default: // do nothing break; } log.warn("start experiment some other error occurred exception: {}", exceptionState); // possible for it to be error but experiment has started up finish // if user clicks on start but reloads the page // model.addAttribute(EXPERIMENT_MESSAGE, "Team: " + teamName + " has started Exp: " + realization.getExperimentName()); return EXPERIMENTS; } else { // everything ok log.info("start experiment success for Team: {}, Exp: {}", teamName, expId); redirectAttributes.addFlashAttribute(EXPERIMENT_MESSAGE, "Experiment " + realization.getExperimentName() + " in team " + teamName + " is starting. This may take up to 10 minutes depending on the scale of your experiment. Please refresh this page later."); return "redirect:/experiments"; } } catch (IOException e) { log.warn("start experiment error: {]", e.getMessage()); throw new WebServiceRuntimeException(e.getMessage()); } } @RequestMapping("/stop_experiment/{teamName}/{expId}") public String stopExperiment(@PathVariable String teamName, @PathVariable String expId, Model model, final RedirectAttributes redirectAttributes, HttpSession session) throws WebServiceRuntimeException { // ensure experiment is active first before stopping Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); if (isNotAdminAndNotInTeam(session, realization)) { log.warn("Permission denied to stop experiment: {} for team: {}", realization.getExperimentName(), teamName); redirectAttributes.addFlashAttribute(MESSAGE, permissionDeniedMessage); return "redirect:/experiments"; } if (!realization.getState().equals(RealizationState.RUNNING.toString())) { log.warn("Trying to stop Team: {}, Experiment: {} with State: {} that is still in 
progress?", teamName, expId, realization.getState()); redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to stop Exp: " + realization.getExperimentName() + ". Please refresh the page again. If the error persists, please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } log.info("Stopping experiment: at " + properties.getStopExperiment(teamName, expId)); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response; return abc(teamName, expId, redirectAttributes, realization, request); } @RequestMapping("/get_topology/{teamName}/{expId}") @ResponseBody public String getTopology(@PathVariable String teamName, @PathVariable String expId) { try { HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange(properties.getTopology(teamName, expId), HttpMethod.GET, request, String.class); log.info("Retrieve experiment topo success"); return "data:image/png;base64," + response.getBody(); } catch (Exception e) { log.error("Error getting topology thumbnail", e.getMessage()); return ""; } } private String abc(@PathVariable String teamName, @PathVariable String expId, RedirectAttributes redirectAttributes, Realization realization, HttpEntity<String> request) throws WebServiceRuntimeException { ResponseEntity response; try { response = restTemplate.exchange(properties.getStopExperiment(teamName, expId), HttpMethod.POST, request, String.class); } catch (Exception e) { log.warn("Error connecting to experiment service to stop experiment", e.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/experiments"; } String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = 
ExceptionState.parseExceptionState(error.getError()); if (exceptionState == ExceptionState.FORBIDDEN_EXCEPTION) { log.warn("Permission denied to stop experiment: {} for team: {}", realization.getExperimentName(), teamName); redirectAttributes.addFlashAttribute(MESSAGE, permissionDeniedMessage); } if (exceptionState == ExceptionState.OBJECT_OPTIMISTIC_LOCKING_FAILURE_EXCEPTION) { log.info("stop experiment database locking failure"); } } else { // everything ok log.info("stop experiment success for Team: {}, Exp: {}", teamName, expId); redirectAttributes.addFlashAttribute(EXPERIMENT_MESSAGE, "Experiment " + realization.getExperimentName() + " in team " + teamName + " is stopping. Please refresh this page in a few minutes."); } return "redirect:/experiments"; } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } private boolean isNotAdminAndNotInTeam(HttpSession session, Realization realization) { return !validateIfAdmin(session) && !checkPermissionRealizeExperiment(realization, session); } //----------------------------------------------------------------------- //--------------------------Admin Revamp--------------------------------- //----------------------------------------------------------------------- //---------------------------------Admin--------------------------------- @RequestMapping("/admin") public String admin(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } List<Team2> pendingApprovalTeamsList = new ArrayList<>(); //------------------------------------ // get list of teams pending for approval //------------------------------------ HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity = restTemplate.exchange(properties.getSioTeamsUrl(), HttpMethod.GET, request, String.class); JSONArray jsonArray = new JSONArray(responseEntity.getBody().toString()); for (int i = 0; i < jsonArray.length(); i++) { JSONObject jsonObject = 
jsonArray.getJSONObject(i); Team2 one = extractTeamInfo(jsonObject.toString()); if (one.getStatus().equals(TeamStatus.PENDING.name())) { pendingApprovalTeamsList.add(one); } } model.addAttribute("pendingApprovalTeamsList", pendingApprovalTeamsList); return "admin3"; } @RequestMapping("/admin/data") public String adminDataManagement(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //------------------------------------ // get list of datasets //------------------------------------ HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange(properties.getData(), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); List<Dataset> datasetsList = new ArrayList<>(); JSONArray dataJsonArray = new JSONArray(responseBody); for (int i = 0; i < dataJsonArray.length(); i++) { JSONObject dataInfoObject = dataJsonArray.getJSONObject(i); Dataset dataset = extractDataInfo(dataInfoObject.toString()); datasetsList.add(dataset); } ResponseEntity response4 = restTemplate.exchange(properties.getDownloadStat(), HttpMethod.GET, request, String.class); String responseBody4 = response4.getBody().toString(); Map<Integer, Long> dataDownloadStats = new HashMap<>(); JSONArray statJsonArray = new JSONArray(responseBody4); for (int i = 0; i < statJsonArray.length(); i++) { JSONObject statInfoObject = statJsonArray.getJSONObject(i); dataDownloadStats.put(statInfoObject.getInt("dataId"), statInfoObject.getLong("count")); } model.addAttribute("dataList", datasetsList); model.addAttribute("downloadStats", dataDownloadStats); return "data_dashboard"; } @RequestMapping("/admin/data/{datasetId}/resources") public String adminViewDataResources(@PathVariable String datasetId, Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //---------------------------------------- // get list of data resources in a dataset 
//----------------------------------------
        Dataset dataset = invokeAndExtractDataInfo(Long.parseLong(datasetId));
        model.addAttribute("dataset", dataset);
        return "admin_data_resources";
    }

    // GET form for editing one data resource of a dataset.
    @RequestMapping(value = "/admin/data/{datasetId}/resources/{resourceId}/update", method = RequestMethod.GET)
    public String adminUpdateResource(@PathVariable String datasetId, @PathVariable String resourceId, Model model, HttpSession session) {
        if (!validateIfAdmin(session)) {
            return NO_PERMISSION_PAGE;
        }

        Dataset dataset = invokeAndExtractDataInfo(Long.parseLong(datasetId));
        // falls back to an empty resource if resourceId is not found in the dataset
        DataResource currentDataResource = new DataResource();
        for (DataResource dataResource : dataset.getDataResources()) {
            if (dataResource.getId() == Long.parseLong(resourceId)) {
                currentDataResource = dataResource;
                break;
            }
        }
        model.addAttribute("did", dataset.getId());
        model.addAttribute("dataresource", currentDataResource);
        // keep the pristine copy in the session so the POST handler can detect what changed
        session.setAttribute(ORIGINAL_DATARESOURCE, currentDataResource);
        return "admin_data_resources_update";
    }

    // updates the malicious status of a data resource
    @RequestMapping(value = "/admin/data/{datasetId}/resources/{resourceId}/update", method = RequestMethod.POST)
    public String adminUpdateResourceFormSubmit(@PathVariable String datasetId, @PathVariable String resourceId, @ModelAttribute DataResource dataResource, Model model, HttpSession session, RedirectAttributes redirectAttributes) throws IOException {
        if (!validateIfAdmin(session)) {
            return NO_PERMISSION_PAGE;
        }

        DataResource original = (DataResource) session.getAttribute(ORIGINAL_DATARESOURCE);
        Dataset dataset = invokeAndExtractDataInfo(Long.parseLong(datasetId));
        updateDataset(dataset, dataResource);

        // add redirect attributes variable to notify what has been modified
        if (!original.getMaliciousFlag().equalsIgnoreCase(dataResource.getMaliciousFlag())) {
            redirectAttributes.addFlashAttribute("editMaliciousFlag", "success");
        }

        log.info("Data updated... {}", dataset.getName());

        model.addAttribute("did", dataset.getId());
        model.addAttribute("dataresource", dataResource);
        session.removeAttribute(ORIGINAL_DATARESOURCE);
        return "redirect:/admin/data/" + datasetId + "/resources/" + resourceId + "/update";
    }

    // Pushes the edited data resource to the data service (PUT) and returns the refreshed dataset.
    private Dataset updateDataset(Dataset dataset, DataResource dataResource) throws IOException {
        log.info("Data resource updating... {}", dataResource);

        HttpEntity<String> request = createHttpEntityWithBody(objectMapper.writeValueAsString(dataResource));
        ResponseEntity response = restTemplate.exchange(properties.getResource(dataset.getId().toString(), dataResource.getId().toString()), HttpMethod.PUT, request, String.class);
        Dataset updatedDataset = extractDataInfo(response.getBody().toString());

        log.info("Data resource updated... {}", dataResource.getUri());
        return updatedDataset;
    }

    // Admin experiments dashboard: joins the experiment list with RUNNING realizations.
    @RequestMapping("/admin/experiments")
    public String adminExperimentsManagement(Model model, HttpSession session) {
        if (!validateIfAdmin(session)) {
            return NO_PERMISSION_PAGE;
        }

        //------------------------------------
        // get list of experiments
        //------------------------------------
        HttpEntity<String> expRequest = createHttpEntityHeaderOnly();
        ResponseEntity expResponseEntity = restTemplate.exchange(properties.getSioExpUrl(), HttpMethod.GET, expRequest, String.class);

        //------------------------------------
        // get list of realizations
        //------------------------------------
        HttpEntity<String> realizationRequest = createHttpEntityHeaderOnly();
        ResponseEntity realizationResponseEntity = restTemplate.exchange(properties.getAllRealizations(), HttpMethod.GET, realizationRequest, String.class);

        JSONArray jsonExpArray = new JSONArray(expResponseEntity.getBody().toString());
        JSONArray jsonRealizationArray = new JSONArray(realizationResponseEntity.getBody().toString());
        Map<Experiment2, Realization> experiment2Map = new HashMap<>(); // exp id, experiment
        Map<Long, Realization> realizationMap = new HashMap<>(); // exp id, realization

        for (int k = 0; k <
jsonRealizationArray.length(); k++) { Realization realization; try { realization = extractRealization(jsonRealizationArray.getJSONObject(k).toString()); } catch (JSONException e) { log.debug("Admin extract realization {}", e); realization = getCleanRealization(); } if (realization.getState().equals(RealizationState.RUNNING.name())) { realizationMap.put(realization.getExperimentId(), realization); } } for (int i = 0; i < jsonExpArray.length(); i++) { Experiment2 experiment2 = extractExperiment(jsonExpArray.getJSONObject(i).toString()); if (realizationMap.containsKey(experiment2.getId())) { experiment2Map.put(experiment2, realizationMap.get(experiment2.getId())); } } model.addAttribute("runningExpMap", experiment2Map); return "experiment_dashboard"; } @RequestMapping("/admin/teams") public String adminTeamsManagement(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //------------------------------------ // get list of teams //------------------------------------ TeamManager2 teamManager2 = new TeamManager2(); HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity = restTemplate.exchange(properties.getSioTeamsUrl(), HttpMethod.GET, request, String.class); JSONArray jsonArray = new JSONArray(responseEntity.getBody().toString()); for (int i = 0; i < jsonArray.length(); i++) { JSONObject jsonObject = jsonArray.getJSONObject(i); Team2 one = extractTeamInfo(jsonObject.toString()); teamManager2.addTeamToTeamMap(one); } model.addAttribute("teamsMap", teamManager2.getTeamMap()); return "team_dashboard"; } @RequestMapping("/admin/users") public String adminUsersManagement(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //------------------------------------ // get list of users //------------------------------------ Map<String, List<String>> userToTeamMap = new HashMap<>(); // userId : list of team names HttpEntity<String> request = 
createHttpEntityHeaderOnly();
        ResponseEntity response2 = restTemplate.exchange(properties.getSioUsersUrl(), HttpMethod.GET, request, String.class);
        String responseBody2 = response2.getBody().toString();
        JSONArray jsonUserArray = new JSONArray(responseBody2);
        List<User2> usersList = new ArrayList<>();

        for (int i = 0; i < jsonUserArray.length(); i++) {
            JSONObject userObject = jsonUserArray.getJSONObject(i);
            User2 user = extractUserInfo(userObject.toString());
            usersList.add(user);

            // get list of teams' names for each user
            List<String> perUserTeamList = new ArrayList<>();
            if (userObject.get("teams") != null) {
                JSONArray teamJsonArray = userObject.getJSONArray("teams");
                for (int k = 0; k < teamJsonArray.length(); k++) {
                    // each array entry is a team id; resolve it to the team name
                    Team2 team = invokeAndExtractTeamInfo(teamJsonArray.get(k).toString());
                    perUserTeamList.add(team.getName());
                }
                userToTeamMap.put(user.getId(), perUserTeamList);
            }
        }

        model.addAttribute("usersList", usersList);
        model.addAttribute("userToTeamMap", userToTeamMap);
        return "user_dashboard";
    }

    // Usage statistics per team for a date range; defaults to the current month.
    @RequestMapping("/admin/usage")
    public String adminTeamUsage(Model model,
                                 @RequestParam(value = "team", required = false) String team,
                                 @RequestParam(value = "start", required = false) String start,
                                 @RequestParam(value = "end", required = false) String end,
                                 HttpSession session) {
        if (!validateIfAdmin(session)) {
            return NO_PERMISSION_PAGE;
        }

        // default the window to the first..last day of the current month
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd");
        ZonedDateTime now = ZonedDateTime.now();
        if (start == null) {
            ZonedDateTime startDate = now.with(firstDayOfMonth());
            start = startDate.format(formatter);
        }
        if (end == null) {
            ZonedDateTime endDate = now.with(lastDayOfMonth());
            end = endDate.format(formatter);
        }

        // get list of teamids
        HttpEntity<String> request = createHttpEntityHeaderOnly();
        ResponseEntity responseEntity = restTemplate.exchange(properties.getSioTeamsUrl(), HttpMethod.GET, request, String.class);
        JSONArray jsonArray = new JSONArray(responseEntity.getBody().toString());

        TeamManager2 teamManager2 =
new TeamManager2(); for (int i = 0; i < jsonArray.length(); i++) { JSONObject jsonObject = jsonArray.getJSONObject(i); Team2 one = extractTeamInfo(jsonObject.toString()); teamManager2.addTeamToTeamMap(one); } if (team != null) { responseEntity = restTemplate.exchange(properties.getUsageStat(team, "startDate=" + start, "endDate=" + end), HttpMethod.GET, request, String.class); String usage = responseEntity.getBody().toString(); model.addAttribute("usage", usage); } model.addAttribute("teamsMap", teamManager2.getTeamMap()); model.addAttribute("start", start); model.addAttribute("end", end); model.addAttribute("team", team); return "usage_statistics"; } @RequestMapping(value = "/admin/energy", method = RequestMethod.GET) public String adminEnergy(Model model, @RequestParam(value = "start", required = false) String start, @RequestParam(value = "end", required = false) String end, final RedirectAttributes redirectAttributes, HttpSession session) throws IOException { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); ZonedDateTime now = ZonedDateTime.now(); if (start == null) { ZonedDateTime startDate = now.with(firstDayOfMonth()); start = startDate.format(formatter); } if (end == null) { ZonedDateTime endDate = now.with(lastDayOfMonth()); end = endDate.format(formatter); } HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity; try { responseEntity = restTemplate.exchange(properties.getEnergyStatistics("startDate=" + start, "endDate=" + end), HttpMethod.GET, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio analytics service for energy usage: {}", e); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return REDIRECT_ENERGY_USAGE; } String responseBody = responseEntity.getBody().toString(); JSONArray jsonArray = new JSONArray(responseBody); // handling exceptions from SIO if 
(RestUtil.isError(responseEntity.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case START_DATE_AFTER_END_DATE_EXCEPTION: log.warn("Get energy usage : Start date after end date error"); redirectAttributes.addFlashAttribute(MESSAGE, ERR_START_DATE_AFTER_END_DATE); return REDIRECT_ENERGY_USAGE; default: log.warn("Get energy usage : sio or deterlab adapter connection error"); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return REDIRECT_ENERGY_USAGE; } } else { log.info("Get energy usage info : {}", responseBody); } DecimalFormat df2 = new DecimalFormat(".##"); double sumEnergy = 0.00; List<String> listOfDate = new ArrayList<>(); List<Double> listOfEnergy = new ArrayList<>(); ZonedDateTime currentZonedDateTime = convertToZonedDateTime(start); String currentDate = null; for (int i = 0; i < jsonArray.length(); i++) { sumEnergy += jsonArray.getDouble(i); // add into listOfDate to display graph currentDate = currentZonedDateTime.format(formatter); listOfDate.add(currentDate); // add into listOfEnergy to display graph double energy = Double.valueOf(df2.format(jsonArray.getDouble(i))); listOfEnergy.add(energy); currentZonedDateTime = convertToZonedDateTime(currentDate).plusDays(1); } sumEnergy = Double.valueOf(df2.format(sumEnergy)); model.addAttribute("listOfDate", listOfDate); model.addAttribute("listOfEnergy", listOfEnergy); model.addAttribute("start", start); model.addAttribute("end", end); model.addAttribute("energy", sumEnergy); return "energy_usage"; } /** * Get simple ZonedDateTime from date string in the format 'YYYY-MM-DD'. 
* @param date date string to convert
     * @return ZonedDateTime of
     */
    private ZonedDateTime convertToZonedDateTime(String date) {
        String[] result = date.split("-");
        // midnight at the start of the given day, Singapore time
        return ZonedDateTime.of(
                Integer.parseInt(result[0]),
                Integer.parseInt(result[1]),
                Integer.parseInt(result[2]),
                0, 0, 0, 0,
                ZoneId.of("Asia/Singapore"));
    }

    // @RequestMapping(value="/admin/domains/add", method=RequestMethod.POST)
    // public String addDomain(@Valid Domain domain, BindingResult bindingResult) {
    //     if (bindingResult.hasErrors()) {
    //         return "redirect:/admin";
    //     } else {
    //         domainManager.addDomains(domain.getDomainName());
    //     }
    //     return "redirect:/admin";
    // }

    // @RequestMapping("/admin/domains/remove/{domainKey}")
    // public String removeDomain(@PathVariable String domainKey) {
    //     domainManager.removeDomains(domainKey);
    //     return "redirect:/admin";
    // }

    // Approves a pending team application on behalf of its owner.
    @RequestMapping("/admin/teams/accept/{teamId}/{teamOwnerId}")
    public String approveTeam(
            @PathVariable String teamId,
            @PathVariable String teamOwnerId,
            final RedirectAttributes redirectAttributes,
            HttpSession session
    ) throws WebServiceRuntimeException {
        if (!validateIfAdmin(session)) {
            return NO_PERMISSION_PAGE;
        }

        //FIXME require approver info
        log.info("Approving new team {}, team owner {}", teamId, teamOwnerId);

        HttpEntity<String> request = createHttpEntityHeaderOnly();
        restTemplate.setErrorHandler(new MyResponseErrorHandler());
        ResponseEntity response = restTemplate.exchange(
                properties.getApproveTeam(teamId, teamOwnerId, TeamStatus.APPROVED),
                HttpMethod.POST, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error;
            try {
                error = objectMapper.readValue(responseBody, MyErrorResource.class);
            } catch (IOException e) {
                throw new WebServiceRuntimeException(e.getMessage());
            }

            // map each SIO error to a user-facing flash message
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case TEAM_ID_NULL_OR_EMPTY_EXCEPTION:
                    log.warn("Approve team: TeamId cannot be null or empty: {}", teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "TeamId cannot be null or empty");
                    break;
                case USER_ID_NULL_OR_EMPTY_EXCEPTION:
                    log.warn("Approve team: UserId cannot be null or empty: {}", teamOwnerId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "UserId cannot be null or empty");
                    break;
                case EMAIL_NOT_VERIFIED_EXCEPTION:
                    log.warn("Approve team: User {} email not verified", teamOwnerId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "User email has not been verified");
                    break;
                case INVALID_TEAM_STATUS_EXCEPTION:
                    log.warn("Approve team: TeamStatus is invalid");
                    redirectAttributes.addFlashAttribute(MESSAGE, "Team status is invalid");
                    break;
                case TEAM_NOT_FOUND_EXCEPTION:
                    log.warn("Approve team: Team {} not found", teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "Team does not exist");
                    break;
                case DETERLAB_OPERATION_FAILED_EXCEPTION:
                    log.warn("Approve team: Team {} fail", teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "Approve team request fail on Deterlab");
                    break;
                default:
                    log.warn("Approve team : sio or deterlab adapter connection error");
                    // possible sio or adapter connection fail
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/admin";
        }

        // http status code is OK, then need to check the response message
        String msg = new JSONObject(responseBody).getString("msg");
        if ("approve project OK".equals(msg)) {
            log.info("Approve team {} OK", teamId);
        } else {
            log.warn("Approve team {} FAIL", teamId);
            redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        }
        return "redirect:/admin";
    }

    // Rejects a pending team application, forwarding the admin-supplied reason.
    @RequestMapping("/admin/teams/reject/{teamId}/{teamOwnerId}")
    public String rejectTeam(
            @PathVariable String teamId,
            @PathVariable String teamOwnerId,
            @RequestParam("reason") String reason,
            final RedirectAttributes redirectAttributes,
            HttpSession session
    ) throws WebServiceRuntimeException {
        if (!validateIfAdmin(session)) {
            return NO_PERMISSION_PAGE;
        }

        //FIXME require approver info
log.info("Rejecting new team {}, team owner {}, reason {}", teamId, teamOwnerId, reason);

        // the rejection reason travels in the request body
        HttpEntity<String> request = createHttpEntityWithBody(reason);
        restTemplate.setErrorHandler(new MyResponseErrorHandler());
        ResponseEntity response = restTemplate.exchange(
                properties.getApproveTeam(teamId, teamOwnerId, TeamStatus.REJECTED),
                HttpMethod.POST, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error;
            try {
                error = objectMapper.readValue(responseBody, MyErrorResource.class);
            } catch (IOException e) {
                throw new WebServiceRuntimeException(e.getMessage());
            }

            // map each SIO error to a user-facing flash message
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case TEAM_ID_NULL_OR_EMPTY_EXCEPTION:
                    log.warn("Reject team: TeamId cannot be null or empty: {}", teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "TeamId cannot be null or empty");
                    break;
                case USER_ID_NULL_OR_EMPTY_EXCEPTION:
                    log.warn("Reject team: UserId cannot be null or empty: {}", teamOwnerId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "UserId cannot be null or empty");
                    break;
                case INVALID_TEAM_STATUS_EXCEPTION:
                    log.warn("Reject team: TeamStatus is invalid");
                    redirectAttributes.addFlashAttribute(MESSAGE, "Team status is invalid");
                    break;
                case TEAM_NOT_FOUND_EXCEPTION:
                    log.warn("Reject team: Team {} not found", teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "Team does not exist");
                    break;
                case DETERLAB_OPERATION_FAILED_EXCEPTION:
                    log.warn("Reject team: Team {} fail", teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "Reject team request fail on Deterlab");
                    break;
                default:
                    log.warn("Reject team : sio or deterlab adapter connection error");
                    // possible sio or adapter connection fail
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/admin";
        }

        // http status code is OK, then need to check the response message
        String msg = new
JSONObject(responseBody).getString("msg");
        if ("reject project OK".equals(msg)) {
            log.info("Reject team {} OK", teamId);
        } else {
            log.warn("Reject team {} FAIL", teamId);
            redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        }
        return "redirect:/admin";
    }

    // Restricts ("restrict") an APPROVED team, or frees ("free") a RESTRICTED team back to APPROVED.
    @RequestMapping("/admin/teams/{teamId}")
    public String setupTeamRestriction(
            @PathVariable final String teamId,
            @RequestParam(value = "action", required = true) final String action,
            final RedirectAttributes redirectAttributes,
            HttpSession session) throws IOException {
        final String LOG_MESSAGE = "Updating restriction settings for team {}: {}";

        // check if admin
        if (!validateIfAdmin(session)) {
            log.warn(LOG_MESSAGE, teamId, PERMISSION_DENIED);
            return NO_PERMISSION_PAGE;
        }

        Team2 team = invokeAndExtractTeamInfo(teamId);

        // check if team is approved before restricted
        if ("restrict".equals(action) && team.getStatus().equals(TeamStatus.APPROVED.name())) {
            return restrictTeam(team, redirectAttributes);
        }
        // check if team is restricted before freeing it back to approved
        else if ("free".equals(action) && team.getStatus().equals(TeamStatus.RESTRICTED.name())) {
            return freeTeam(team, redirectAttributes);
        } else {
            // invalid action/status combination — no transition performed
            log.warn(LOG_MESSAGE, teamId, "Cannot " + action + " team with status " + team.getStatus());
            redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + "Cannot " + action + " team " + team.getName() + " with status " + team.getStatus());
            return "redirect:/admin/teams";
        }
    }

    // Sets the team status to RESTRICTED via the teams-status endpoint.
    private String restrictTeam(final Team2 team, RedirectAttributes redirectAttributes) throws IOException {
        log.info("Restricting team {}", team.getId());

        HttpEntity<String> request = createHttpEntityHeaderOnly();
        ResponseEntity response = restTemplate.exchange(
                properties.getSioTeamsStatusUrl(team.getId(), TeamStatus.RESTRICTED),
                HttpMethod.PUT, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error = objectMapper.readValue(responseBody,
MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            String logMessage = "Failed to restrict team {}: {}";
            switch (exceptionState) {
                case TEAM_NOT_FOUND_EXCEPTION:
                    log.warn(logMessage, team.getId(), TEAM_NOT_FOUND);
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + TEAM_NOT_FOUND);
                    break;
                case INVALID_STATUS_TRANSITION_EXCEPTION:
                    log.warn(logMessage, team.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage());
                    break;
                case INVALID_TEAM_STATUS_EXCEPTION:
                    log.warn(logMessage, team.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage());
                    break;
                case FORBIDDEN_EXCEPTION:
                    log.warn(logMessage, team.getId(), PERMISSION_DENIED);
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + PERMISSION_DENIED);
                    break;
                default:
                    log.warn(logMessage, team.getId(), exceptionState.getExceptionName());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
            }
            return "redirect:/admin";
        } else {
            // good
            log.info("Team {} has been restricted", team.getId());
            redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Team " + team.getName() + " status has been changed to " + TeamStatus.RESTRICTED.name());
            return "redirect:/admin";
        }
    }

    // Sets the team status back to APPROVED via the teams-status endpoint.
    private String freeTeam(final Team2 team, RedirectAttributes redirectAttributes) throws IOException {
        log.info("Freeing team {}", team.getId());

        HttpEntity<String> request = createHttpEntityHeaderOnly();
        ResponseEntity response = restTemplate.exchange(
                properties.getSioTeamsStatusUrl(team.getId(), TeamStatus.APPROVED),
                HttpMethod.PUT, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            String logMessage = "Failed to free team {}: {}";
            switch (exceptionState) {
                case TEAM_NOT_FOUND_EXCEPTION:
                    log.warn(logMessage, team.getId(), TEAM_NOT_FOUND);
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + TEAM_NOT_FOUND);
                    break;
                case INVALID_STATUS_TRANSITION_EXCEPTION:
                    log.warn(logMessage, team.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage());
                    break;
                case INVALID_TEAM_STATUS_EXCEPTION:
                    log.warn(logMessage, team.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage());
                    break;
                case FORBIDDEN_EXCEPTION:
                    log.warn(logMessage, team.getId(), PERMISSION_DENIED);
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + PERMISSION_DENIED);
                    break;
                default:
                    log.warn(logMessage, team.getId(), exceptionState.getExceptionName());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
            }
            return "redirect:/admin";
        } else {
            // good
            log.info("Team {} has been freed", team.getId());
            redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Team " + team.getName() + " status has been changed to " + TeamStatus.APPROVED.name());
            return "redirect:/admin";
        }
    }

    // Bans ("freeze") an APPROVED user or unbans ("unfreeze") a FROZEN user.
    @RequestMapping("/admin/users/{userId}")
    public String freezeUnfreezeUsers(
            @PathVariable final String userId,
            @RequestParam(value = "action", required = true) final String action,
            final RedirectAttributes redirectAttributes,
            HttpSession session) throws IOException {
        User2 user = invokeAndExtractUserInfo(userId);

        // check if admin
        if (!validateIfAdmin(session)) {
            log.warn("Access denied when trying to freeze/unfreeze user {}: must be admin!", userId);
            return NO_PERMISSION_PAGE;
        }

        // check if user status is approved before freeze
        if ("freeze".equals(action) && user.getStatus().equals(UserStatus.APPROVED.toString())) {
            return freezeUser(user, redirectAttributes);
        }
        // check if user status is frozen before unfreeze
        else if ("unfreeze".equals(action) && user.getStatus().equals(UserStatus.FROZEN.toString())) {
            return unfreezeUser(user,
redirectAttributes);
        } else {
            // invalid action/status combination — no transition performed
            log.warn("Error in freeze/unfreeze user {}: failed to {} user with status {}", userId, action, user.getStatus());
            redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + "failed to " + action + " user " + user.getEmail() + " with status " + user.getStatus());
            return "redirect:/admin/users";
        }
    }

    // Sets the user status to FROZEN (ban) via the users-status endpoint.
    private String freezeUser(final User2 user, RedirectAttributes redirectAttributes) throws IOException {
        log.info("Freezing user {}, email {}", user.getId(), user.getEmail());

        HttpEntity<String> request = createHttpEntityHeaderOnly();
        ResponseEntity response = restTemplate.exchange(
                properties.getSioUsersStatusUrl(user.getId(), UserStatus.FROZEN.toString()),
                HttpMethod.PUT, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case USER_NOT_FOUND_EXCEPTION:
                    log.warn("Failed to freeze user {}: user not found", user.getId());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + NOT_FOUND);
                    break;
                case INVALID_STATUS_TRANSITION_EXCEPTION:
                    log.warn("Failed to freeze user {}: invalid status transition {}", user.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not allowed.");
                    break;
                case INVALID_USER_STATUS_EXCEPTION:
                    log.warn("Failed to freeze user {}: invalid user status {}", user.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not a valid status.");
                    break;
                case FORBIDDEN_EXCEPTION:
                    log.warn("Failed to freeze user {}: must be an Admin", user.getId());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " permission denied.");
                    break;
                default:
                    log.warn("Failed to freeze user {}: {}", user.getId(),
exceptionState.getExceptionName());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/admin";
        } else {
            // good
            log.info("User {} has been frozen", user.getId());
            redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "User " + user.getEmail() + " has been banned.");
            return "redirect:/admin";
        }
    }

    // Sets the user status back to APPROVED (unban) via the users-status endpoint.
    private String unfreezeUser(final User2 user, RedirectAttributes redirectAttributes) throws IOException {
        log.info("Unfreezing user {}, email {}", user.getId(), user.getEmail());

        HttpEntity<String> request = createHttpEntityHeaderOnly();
        ResponseEntity response = restTemplate.exchange(
                properties.getSioUsersStatusUrl(user.getId(), UserStatus.APPROVED.toString()),
                HttpMethod.PUT, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case USER_NOT_FOUND_EXCEPTION:
                    log.warn("Failed to unfreeze user {}: user not found", user.getId());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + NOT_FOUND);
                    break;
                case INVALID_STATUS_TRANSITION_EXCEPTION:
                    log.warn("Failed to unfreeze user {}: invalid status transition {}", user.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not allowed.");
                    break;
                case INVALID_USER_STATUS_EXCEPTION:
                    log.warn("Failed to unfreeze user {}: invalid user status {}", user.getId(), error.getMessage());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not a valid status.");
                    break;
                case FORBIDDEN_EXCEPTION:
                    log.warn("Failed to unfreeze user {}: must be an Admin", user.getId());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " permission denied.");
                    break;
                default:
                    log.warn("Failed to unfreeze user {}: {}", user.getId(), exceptionState.getExceptionName());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/admin";
        } else {
            // good
            log.info("User {} has been unfrozen", user.getId());
            redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "User " + user.getEmail() + " has been unbanned.");
            return "redirect:/admin";
        }
    }

    // Permanently removes a user account; the SIO service enforces the "deletable" checks.
    @RequestMapping("/admin/users/{userId}/remove")
    public String removeUser(@PathVariable final String userId, final RedirectAttributes redirectAttributes, HttpSession session) throws IOException {
        // check if admin
        if (!validateIfAdmin(session)) {
            log.warn("Access denied when trying to remove user {}: must be admin!", userId);
            return NO_PERMISSION_PAGE;
        }

        User2 user = invokeAndExtractUserInfo(userId);
        HttpEntity<String> request = createHttpEntityHeaderOnly();
        restTemplate.setErrorHandler(new MyResponseErrorHandler());
        ResponseEntity response = restTemplate.exchange(properties.getUser(user.getId()), HttpMethod.DELETE, request, String.class);
        String responseBody = response.getBody().toString();

        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case USER_NOT_FOUND_EXCEPTION:
                    log.warn("Failed to remove user {}: user not found", user.getId());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + NOT_FOUND);
                    break;
                case USER_IS_NOT_DELETABLE_EXCEPTION:
                    log.warn("Failed to remove user {}: user is not deletable", user.getId());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + " is not deletable.");
                    break;
                case CREDENTIALS_NOT_FOUND_EXCEPTION:
                    log.warn("Failed to remove user {}: unable to find credentials", user.getId());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + " is not found.");
                    break;
                default:
log.warn("Failed to remove user {}: {}", user.getId(), exceptionState.getExceptionName());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
        } else {
            log.info("User {} has been removed", userId);
            redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "User " + user.getEmail() + " has been removed.");
        }
        return "redirect:/admin/users";
    }

    // [commented-out legacy admin handlers kept for reference]
    // @RequestMapping("/admin/experiments/remove/{expId}")
    // public String adminRemoveExp(@PathVariable Integer expId) {
    //     int teamId = experimentManager.getExperimentByExpId(expId).getTeamId();
    //     experimentManager.adminRemoveExperiment(expId);
    //
    //     // decrease exp count to be display on Teams page
    //     teamManager.decrementExperimentCount(teamId);
    //     return "redirect:/admin";
    // }

    // @RequestMapping(value="/admin/data/contribute", method=RequestMethod.GET)
    // public String adminContributeDataset(Model model) {
    //     model.addAttribute("dataset", new Dataset());
    //
    //     File rootFolder = new File(App.ROOT);
    //     List<String> fileNames = Arrays.stream(rootFolder.listFiles())
    //             .map(f -> f.getError())
    //             .collect(Collectors.toList());
    //
    //     model.addAttribute("files",
    //             Arrays.stream(rootFolder.listFiles())
    //                     .sorted(Comparator.comparingLong(f -> -1 * f.lastModified()))
    //                     .map(f -> f.getError())
    //                     .collect(Collectors.toList())
    //     );
    //
    //     return "admin_contribute_data";
    // }

    // @RequestMapping(value="/admin/data/contribute", method=RequestMethod.POST)
    // public String validateAdminContributeDataset(@ModelAttribute("dataset") Dataset dataset, HttpSession session, @RequestParam("file") MultipartFile file, RedirectAttributes redirectAttributes) throws IOException {
    //     BufferedOutputStream stream = null;
    //     FileOutputStream fileOutputStream = null;
    //     // TODO
    //     // validation
    //     // get file from user upload to server
    //     if (!file.isEmpty()) {
    //         try {
    //             String fileName = getSessionIdOfLoggedInUser(session) + "-" + file.getOriginalFilename();
    //             fileOutputStream = new FileOutputStream(new File(App.ROOT + "/" + fileName));
    //             stream = new BufferedOutputStream(fileOutputStream);
    //             FileCopyUtils.copy(file.getInputStream(), stream);
    //             redirectAttributes.addFlashAttribute(MESSAGE,
    //                     "You successfully uploaded " + file.getOriginalFilename() + "!");
    //             datasetManager.addDataset(getSessionIdOfLoggedInUser(session), dataset, file.getOriginalFilename());
    //         }
    //         catch (Exception e) {
    //             redirectAttributes.addFlashAttribute(MESSAGE,
    //                     "You failed to upload " + file.getOriginalFilename() + " => " + e.getMessage());
    //         } finally {
    //             if (stream != null) {
    //                 stream.close();
    //             }
    //             if (fileOutputStream != null) {
    //                 fileOutputStream.close();
    //             }
    //         }
    //     }
    //     else {
    //         redirectAttributes.addFlashAttribute(MESSAGE,
    //                 "You failed to upload " + file.getOriginalFilename() + " because the file was empty");
    //     }
    //     return "redirect:/admin";
    // }

    // @RequestMapping("/admin/data/remove/{datasetId}")
    // public String adminRemoveDataset(@PathVariable Integer datasetId) {
    //     datasetManager.removeDataset(datasetId);
    //     return "redirect:/admin";
    // }

    // @RequestMapping(value="/admin/node/add", method=RequestMethod.GET)
    // public String adminAddNode(Model model) {
    //     model.addAttribute("node", new Node());
    //     return "admin_add_node";
    // }

    // @RequestMapping(value="/admin/node/add", method=RequestMethod.POST)
    // public String adminAddNode(@ModelAttribute("node") Node node) {
    //     // TODO
    //     // validate fields, eg should be integer
    //     nodeManager.addNode(node);
    //     return "redirect:/admin";
    // }

    //--------------------------Static pages for teams--------------------------

    @RequestMapping("/teams/team_application_submitted")
    public String teamAppSubmitFromTeamsPage() {
        return "team_page_application_submitted";
    }

    // Confirmation page after requesting to join an existing team from the Teams page.
    @RequestMapping("/teams/join_application_submitted/{teamName}")
    public String teamAppJoinFromTeamsPage(@PathVariable String teamName, Model model) throws WebServiceRuntimeException {
        log.info("Redirecting to join application submitted page");

        HttpEntity<String> request =
createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getTeamByName(teamName), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case TEAM_NOT_FOUND_EXCEPTION: log.warn("submitted join team request : team name error"); break; default: log.warn("submitted join team request : some other failure"); // possible sio or adapter connection fail break; } return "redirect:/teams/join_team"; } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } Team2 one = extractTeamInfo(responseBody); model.addAttribute("team", one); return "team_page_join_application_submitted"; } //--------------------------Static pages for sign up-------------------------- @RequestMapping("/team_application_submitted") public String teamAppSubmit() { return "team_application_submitted"; } /** * A page to show new users has successfully registered to apply to join an existing team * The page contains the team owner information which the users requested to join * * @param model The model which is passed from signup * @return A success page otherwise an error page if the user tries to access this page directly */ @RequestMapping("/join_application_submitted") public String joinTeamAppSubmit(Model model) { // model attribute should be passed from /signup2 // team is required to display the team owner details if (model.containsAttribute("team")) { return "join_team_application_submitted"; } return "error"; } @RequestMapping("/email_not_validated") public String emailNotValidated() { return "email_not_validated"; } @RequestMapping("/team_application_under_review") public String 
teamAppUnderReview() { return "team_application_under_review"; } // model attribute name come from /login @RequestMapping("/email_checklist") public String emailChecklist(@ModelAttribute("statuschecklist") String status) { return "email_checklist"; } @RequestMapping("/join_application_awaiting_approval") public String joinTeamAppAwaitingApproval(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "join_team_application_awaiting_approval"; } //--------------------------Get List of scenarios filenames-------------------------- private List<String> getScenarioFileNameList() throws WebServiceRuntimeException { log.info("Retrieving scenario file names"); // List<String> scenarioFileNameList = null; // try { // scenarioFileNameList = IOUtils.readLines(getClass().getClassLoader().getResourceAsStream("scenarios"), StandardCharsets.UTF_8); // } catch (IOException e) { // throw new WebServiceRuntimeException(e.getMessage()); // } // File folder = null; // try { // folder = new ClassPathResource("scenarios").getFile(); // } catch (IOException e) { // throw new WebServiceRuntimeException(e.getMessage()); // } // List<String> scenarioFileNameList = new ArrayList<>(); // File[] files = folder.listFiles(); // for (File file : files) { // if (file.isFile()) { // scenarioFileNameList.add(file.getError()); // } // } // FIXME: hardcode list of filenames for now List<String> scenarioFileNameList = new ArrayList<>(); scenarioFileNameList.add("Scenario 1 - Experiment with a single node"); scenarioFileNameList.add("Scenario 2 - Experiment with 2 nodes and 10Gb link"); scenarioFileNameList.add("Scenario 3 - Experiment with 3 nodes in a LAN"); scenarioFileNameList.add("Scenario 4 - Experiment with 2 nodes and customized link property"); scenarioFileNameList.add("Scenario 5 - Single SDN switch connected to two nodes"); scenarioFileNameList.add("Scenario 6 - Tree Topology with configurable SDN switches"); // 
scenarioFileNameList.add("Scenario 4 - Two nodes linked with a 10Gbps SDN switch"); // scenarioFileNameList.add("Scenario 5 - Three nodes with Blockchain capabilities"); log.info("Scenario file list: {}", scenarioFileNameList); return scenarioFileNameList; } private String getScenarioContentsFromFile(String scenarioFileName) throws WebServiceRuntimeException { // FIXME: switch to better way of referencing scenario descriptions to actual filenames String actualScenarioFileName; if (scenarioFileName.contains("Scenario 1")) { actualScenarioFileName = "basic1.ns"; } else if (scenarioFileName.contains("Scenario 2")) { actualScenarioFileName = "basic2.ns"; } else if (scenarioFileName.contains("Scenario 3")) { actualScenarioFileName = "basic3.ns"; } else if (scenarioFileName.contains("Scenario 4")) { actualScenarioFileName = "basic4.ns"; } else if (scenarioFileName.contains("Scenario 5")) { actualScenarioFileName = "basic5.ns"; } else if (scenarioFileName.contains("Scenario 6")) { actualScenarioFileName = "basic6.ns"; } else { // defaults to basic single node actualScenarioFileName = "basic1.ns"; } try { log.info("Retrieving scenario files {}", getClass().getClassLoader().getResourceAsStream("scenarios/" + actualScenarioFileName)); List<String> lines = IOUtils.readLines(getClass().getClassLoader().getResourceAsStream("scenarios/" + actualScenarioFileName), StandardCharsets.UTF_8); StringBuilder sb = new StringBuilder(); for (String line : lines) { sb.append(line); sb.append(System.getProperty("line.separator")); } log.info("Experiment ns file contents: {}", sb); return sb.toString(); } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } //---Check if user is a team owner and has any join request waiting for approval---- private boolean hasAnyJoinRequest(HashMap<Integer, Team> teamMapOwnedByUser) { for (Map.Entry<Integer, Team> entry : teamMapOwnedByUser.entrySet()) { Team currTeam = entry.getValue(); if (currTeam.isUserJoinRequestEmpty() == 
false) { // at least one team has join user request return true; } } // loop through all teams but never return a single true // therefore, user's controlled teams has no join request return false; } //--------------------------MISC-------------------------- private int getSessionIdOfLoggedInUser(HttpSession session) { return Integer.parseInt(session.getAttribute(SESSION_LOGGED_IN_USER_ID).toString()); } private User2 extractUserInfo(String userJson) { User2 user2 = new User2(); if (userJson == null) { // return empty user return user2; } JSONObject object = new JSONObject(userJson); JSONObject userDetails = object.getJSONObject("userDetails"); JSONObject address = userDetails.getJSONObject("address"); user2.setId(object.getString("id")); user2.setFirstName(getJSONStr(userDetails.getString("firstName"))); user2.setLastName(getJSONStr(userDetails.getString("lastName"))); user2.setJobTitle(userDetails.getString("jobTitle")); user2.setEmail(userDetails.getString("email")); user2.setPhone(userDetails.getString("phone")); user2.setAddress1(address.getString("address1")); user2.setAddress2(address.getString("address2")); user2.setCountry(address.getString("country")); user2.setRegion(address.getString("region")); user2.setPostalCode(address.getString("zipCode")); user2.setCity(address.getString("city")); user2.setInstitution(userDetails.getString("institution")); user2.setInstitutionAbbreviation(userDetails.getString("institutionAbbreviation")); user2.setInstitutionWeb(userDetails.getString("institutionWeb")); user2.setStatus(object.getString("status")); user2.setEmailVerified(object.getBoolean("emailVerified")); // applicationDate is ZonedDateTime try { user2.setApplicationDate(object.get(APPLICATION_DATE).toString()); } catch (Exception e) { // since applicationDate date is a ZonedDateTime and not String // set to '?' 
at the html page log.warn("Error getting user application date {}", e); } return user2; } private Team2 extractTeamInfo(String json) { Team2 team2 = new Team2(); JSONObject object = new JSONObject(json); JSONArray membersArray = object.getJSONArray("members"); // createdDate is ZonedDateTime // processedDate is ZonedDateTime try { team2.setApplicationDate(object.get(APPLICATION_DATE).toString()); team2.setProcessedDate(object.get("processedDate").toString()); } catch (Exception e) { log.warn("Error getting team application date and/or processedDate {}", e); // created date is a ZonedDateTime // since created date and proccessed date is a ZonedDateTime and not String // both is set to '?' at the html page if exception } team2.setId(object.getString("id")); team2.setName(object.getString("name")); team2.setDescription(object.getString("description")); team2.setWebsite(object.getString("website")); team2.setOrganisationType(object.getString("organisationType")); team2.setStatus(object.getString("status")); team2.setVisibility(object.getString("visibility")); for (int i = 0; i < membersArray.length(); i++) { JSONObject memberObject = membersArray.getJSONObject(i); String userId = memberObject.getString("userId"); String teamMemberType = memberObject.getString(MEMBER_TYPE); String teamMemberStatus = memberObject.getString("memberStatus"); User2 myUser = invokeAndExtractUserInfo(userId); if (teamMemberType.equals(MemberType.MEMBER.name())) { // add to pending members list for Members Awaiting Approval function if (teamMemberStatus.equals(MemberStatus.PENDING.name())) { team2.addPendingMembers(myUser); } } else if (teamMemberType.equals(MemberType.OWNER.name())) { // explicit safer check team2.setOwner(myUser); } team2.addMembersToStatusMap(MemberStatus.valueOf(teamMemberStatus), myUser); } team2.setMembersCount(team2.getMembersStatusMap().get(MemberStatus.APPROVED).size()); return team2; } // use to extract JSON Strings from services // in the case where the JSON Strings 
are null, return "Connection Error" private String getJSONStr(String jsonString) { if (jsonString == null || jsonString.isEmpty()) { return CONNECTION_ERROR; } return jsonString; } /** * Checks if user is pending for join request approval from team leader * Use for fixing bug for view experiment page where users previously can view the experiments just by issuing a join request * * @param json the response body after calling team service * @param loginUserId the current logged in user id * @return True if the user is anything but APPROVED, false otherwise */ private boolean isMemberJoinRequestPending(String loginUserId, String json) { if (json == null) { return true; } JSONObject object = new JSONObject(json); JSONArray membersArray = object.getJSONArray("members"); for (int i = 0; i < membersArray.length(); i++) { JSONObject memberObject = membersArray.getJSONObject(i); String userId = memberObject.getString("userId"); String teamMemberStatus = memberObject.getString("memberStatus"); if (userId.equals(loginUserId) && !teamMemberStatus.equals(MemberStatus.APPROVED.toString())) { return true; } } log.info("User: {} is viewing experiment page", loginUserId); return false; } private Team2 extractTeamInfoUserJoinRequest(String userId, String json) { Team2 team2 = new Team2(); JSONObject object = new JSONObject(json); JSONArray membersArray = object.getJSONArray("members"); for (int i = 0; i < membersArray.length(); i++) { JSONObject memberObject = membersArray.getJSONObject(i); String uid = memberObject.getString("userId"); String teamMemberStatus = memberObject.getString("memberStatus"); if (uid.equals(userId) && teamMemberStatus.equals(MemberStatus.PENDING.toString())) { team2.setId(object.getString("id")); team2.setName(object.getString("name")); team2.setDescription(object.getString("description")); team2.setWebsite(object.getString("website")); team2.setOrganisationType(object.getString("organisationType")); team2.setStatus(object.getString("status")); 
team2.setVisibility(object.getString("visibility")); team2.setMembersCount(membersArray.length()); return team2; } } // no such member in the team found return null; } protected Dataset invokeAndExtractDataInfo(Long dataId) { HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange(properties.getDataset(dataId.toString()), HttpMethod.GET, request, String.class); return extractDataInfo(response.getBody().toString()); } protected Dataset extractDataInfo(String json) { log.debug(json); JSONObject object = new JSONObject(json); Dataset dataset = new Dataset(); dataset.setId(object.getInt("id")); dataset.setName(object.getString("name")); dataset.setDescription(object.getString("description")); dataset.setContributorId(object.getString("contributorId")); dataset.addVisibility(object.getString("visibility")); dataset.addAccessibility(object.getString("accessibility")); try { dataset.setReleasedDate(getZonedDateTime(object.get("releasedDate").toString())); } catch (IOException e) { log.warn("Error getting released date {}", e); dataset.setReleasedDate(null); } dataset.setCategoryId(object.getInt("categoryId")); dataset.setLicenseId(object.getInt("licenseId")); dataset.setContributor(invokeAndExtractUserInfo(dataset.getContributorId())); dataset.setCategory(invokeAndExtractCategoryInfo(dataset.getCategoryId())); dataset.setLicense(invokeAndExtractLicenseInfo(dataset.getLicenseId())); JSONArray resources = object.getJSONArray("resources"); for (int i = 0; i < resources.length(); i++) { JSONObject resource = resources.getJSONObject(i); DataResource dataResource = new DataResource(); dataResource.setId(resource.getLong("id")); dataResource.setUri(resource.getString("uri")); dataResource.setMalicious(resource.getBoolean("malicious")); dataResource.setScanned(resource.getBoolean("scanned")); dataset.addResource(dataResource); } JSONArray approvedUsers = object.getJSONArray("approvedUsers"); for (int i = 0; i < 
approvedUsers.length(); i++) { dataset.addApprovedUser(approvedUsers.getString(i)); } JSONArray keywords = object.getJSONArray("keywords"); List<String> keywordList = new ArrayList<>(); for (int i = 0; i < keywords.length(); i++) { keywordList.add(keywords.getString(i)); } dataset.setKeywordList(keywordList); return dataset; } protected DataCategory extractCategoryInfo(String json) { log.debug(json); DataCategory dataCategory = new DataCategory(); JSONObject object = new JSONObject(json); dataCategory.setId(object.getLong("id")); dataCategory.setName(object.getString("name")); dataCategory.setDescription(object.getString("description")); return dataCategory; } protected DataLicense extractLicenseInfo(String json) { log.debug(json); DataLicense dataLicense = new DataLicense(); JSONObject object = new JSONObject(json); dataLicense.setId(object.getLong("id")); dataLicense.setName(object.getString("name")); dataLicense.setAcronym(object.getString("acronym")); dataLicense.setDescription(object.getString("description")); dataLicense.setLink(object.getString("link")); return dataLicense; } protected DataCategory invokeAndExtractCategoryInfo(Integer categoryId) { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response; try { response = restTemplate.exchange(properties.getCategory(categoryId), HttpMethod.GET, request, String.class); } catch (Exception e) { log.warn("Data service not available to retrieve Category: {}", categoryId); return new DataCategory(); } return extractCategoryInfo(response.getBody().toString()); } protected DataLicense invokeAndExtractLicenseInfo(Integer licenseId) { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response; try { response = restTemplate.exchange(properties.getLicense(licenseId), HttpMethod.GET, request, String.class); } catch (Exception e) { log.warn("Data service not available to retrieve License: {}", licenseId); return new DataLicense(); } return 
extractLicenseInfo(response.getBody().toString()); } protected User2 invokeAndExtractUserInfo(String userId) { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response; try { response = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class); } catch (Exception e) { log.warn("User service not available to retrieve User: {}", userId); return new User2(); } return extractUserInfo(response.getBody().toString()); } private Team2 invokeAndExtractTeamInfo(String teamId) { HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, request, String.class); return extractTeamInfo(responseEntity.getBody().toString()); } private Experiment2 extractExperiment(String experimentJson) { log.info("{}", experimentJson); Experiment2 experiment2 = new Experiment2(); JSONObject object = new JSONObject(experimentJson); experiment2.setId(object.getLong("id")); experiment2.setUserId(object.getString("userId")); experiment2.setTeamId(object.getString(TEAM_ID)); experiment2.setTeamName(object.getString(TEAM_NAME)); experiment2.setName(object.getString("name")); experiment2.setDescription(object.getString("description")); experiment2.setNsFile(object.getString("nsFile")); experiment2.setNsFileContent(object.getString("nsFileContent")); experiment2.setIdleSwap(object.getInt("idleSwap")); experiment2.setMaxDuration(object.getInt("maxDuration")); try { experiment2.setCreatedDate(object.get("createdDate").toString()); } catch (Exception e) { experiment2.setCreatedDate(""); } try { experiment2.setLastModifiedDate(object.get("lastModifiedDate").toString()); } catch (Exception e) { experiment2.setLastModifiedDate(""); } return experiment2; } private Realization invokeAndExtractRealization(String teamName, Long id) { HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new 
MyResponseErrorHandler()); ResponseEntity response = null; try { log.info("retrieving the latest exp status: {}", properties.getRealizationByTeam(teamName, id.toString())); response = restTemplate.exchange(properties.getRealizationByTeam(teamName, id.toString()), HttpMethod.GET, request, String.class); } catch (Exception e) { return getCleanRealization(); } String responseBody; if (response.getBody() == null) { return getCleanRealization(); } else { responseBody = response.getBody().toString(); } try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); log.warn("error in retrieving realization for team: {}, realization: {}", teamName, id); return getCleanRealization(); } else { // will throw JSONException if the format return by sio is not a valid JSOn format // will occur if the realization details are still in the old format return extractRealization(responseBody); } } catch (IOException | JSONException e) { return getCleanRealization(); } } private Realization extractRealization(String json) { log.info("extracting realization: {}", json); Realization realization = new Realization(); JSONObject object = new JSONObject(json); realization.setExperimentId(object.getLong("experimentId")); realization.setExperimentName(object.getString("experimentName")); realization.setUserId(object.getString("userId")); realization.setTeamId(object.getString(TEAM_ID)); realization.setState(object.getString("state")); String exp_report = ""; Object expDetailsObject = object.get("details"); log.info("exp detail object: {}", expDetailsObject); if (expDetailsObject == JSONObject.NULL || expDetailsObject.toString().isEmpty()) { log.info("set details empty"); realization.setDetails(""); realization.setNumberOfNodes(0); } else { log.info("exp report to string: {}", expDetailsObject.toString()); exp_report = expDetailsObject.toString(); realization.setDetails(exp_report); JSONObject nodesInfoObject = new 
JSONObject(expDetailsObject.toString()); for (Object key : nodesInfoObject.keySet()) { Map<String, String> nodeDetails = new HashMap<>(); String nodeName = (String) key; JSONObject nodeDetailsJson = new JSONObject(nodesInfoObject.get(nodeName).toString()); nodeDetails.put("os", getValueFromJSONKey(nodeDetailsJson, "os")); nodeDetails.put("qualifiedName", getValueFromJSONKey(nodeDetailsJson, "qualifiedName")); nodeDetails.put(NODE_ID, getValueFromJSONKey(nodeDetailsJson, NODE_ID)); realization.addNodeDetails(nodeName, nodeDetails); } log.info("nodes info object: {}", nodesInfoObject); realization.setNumberOfNodes(nodesInfoObject.keySet().size()); } return realization; } // gets the value that corresponds to a particular key // checks if a particular key in the JSONObject exists // returns the value if the key exists, otherwise, returns N.A. private String getValueFromJSONKey(JSONObject json, String key) { if (json.has(key)) { return json.get(key).toString(); } return NOT_APPLICABLE; } /** * @param zonedDateTimeJSON JSON string * @return a date in the format MMM-d-yyyy */ protected String formatZonedDateTime(String zonedDateTimeJSON) throws Exception { ZonedDateTime zonedDateTime = getZonedDateTime(zonedDateTimeJSON); DateTimeFormatter format = DateTimeFormatter.ofPattern("MMM-d-yyyy"); return zonedDateTime.format(format); } protected ZonedDateTime getZonedDateTime(String zonedDateTimeJSON) throws IOException { ObjectMapper mapper = new ObjectMapper(); mapper.registerModule(new JavaTimeModule()); return mapper.readValue(zonedDateTimeJSON, ZonedDateTime.class); } /** * Creates a HttpEntity with a request body and header but no authorization header * To solve the expired jwt token * * @param jsonString The JSON request converted to string * @return A HttpEntity request * @see HttpEntity createHttpEntityHeaderOnly() for request with only header */ protected HttpEntity<String> createHttpEntityWithBodyNoAuthHeader(String jsonString) { HttpHeaders headers = new 
HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); return new HttpEntity<>(jsonString, headers); } /** * Creates a HttpEntity that contains only a header and empty body but no authorization header * To solve the expired jwt token * * @return A HttpEntity request * @see HttpEntity createHttpEntityWithBody() for request with both body and header */ protected HttpEntity<String> createHttpEntityHeaderOnlyNoAuthHeader() { HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); return new HttpEntity<>(headers); } /** * Creates a HttpEntity with a request body and header * * @param jsonString The JSON request converted to string * @return A HttpEntity request * @implNote Authorization header must be set to the JwTToken in the format [Bearer: TOKEN_ID] * @see HttpEntity createHttpEntityHeaderOnly() for request with only header */ protected HttpEntity<String> createHttpEntityWithBody(String jsonString) { HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); headers.set("Authorization", httpScopedSession.getAttribute(webProperties.getSessionJwtToken()).toString()); return new HttpEntity<>(jsonString, headers); } /** * Creates a HttpEntity that contains only a header and empty body * * @return A HttpEntity request * @implNote Authorization header must be set to the JwTToken in the format [Bearer: TOKEN_ID] * @see HttpEntity createHttpEntityWithBody() for request with both body and header */ protected HttpEntity<String> createHttpEntityHeaderOnly() { HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); headers.set("Authorization", httpScopedSession.getAttribute(webProperties.getSessionJwtToken()).toString()); return new HttpEntity<>(headers); } private void setSessionVariables(HttpSession session, String loginEmail, String id, String firstName, String userRoles, String token) { User2 user = invokeAndExtractUserInfo(id); 
session.setAttribute(webProperties.getSessionEmail(), loginEmail); session.setAttribute(webProperties.getSessionUserId(), id); session.setAttribute(webProperties.getSessionUserFirstName(), firstName); session.setAttribute(webProperties.getSessionRoles(), userRoles); session.setAttribute(webProperties.getSessionJwtToken(), "Bearer " + token); log.info("Session variables - sessionLoggedEmail: {}, id: {}, name: {}, roles: {}, token: {}", loginEmail, id, user.getFirstName(), userRoles, token); } private void removeSessionVariables(HttpSession session) { log.info("removing session variables: email: {}, userid: {}, user first name: {}", session.getAttribute(webProperties.getSessionEmail()), session.getAttribute(webProperties.getSessionUserId()), session.getAttribute(webProperties.getSessionUserFirstName())); session.removeAttribute(webProperties.getSessionEmail()); session.removeAttribute(webProperties.getSessionUserId()); session.removeAttribute(webProperties.getSessionUserFirstName()); session.removeAttribute(webProperties.getSessionRoles()); session.removeAttribute(webProperties.getSessionJwtToken()); session.invalidate(); } protected boolean validateIfAdmin(HttpSession session) { //log.info("User: {} is logged on as: {}", session.getAttribute(webProperties.getSessionEmail()), session.getAttribute(webProperties.getSessionRoles())); return session.getAttribute(webProperties.getSessionRoles()).equals(UserType.ADMIN.toString()); } /** * Ensure that only users of the team can realize or un-realize experiment * A pre-condition is that the users must be approved. * Teams must also be approved. 
* * @return the main experiment page */ private boolean checkPermissionRealizeExperiment(Realization realization, HttpSession session) { // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(userRespEntity.getBody().toString()); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = teamIdsJsonArray.get(i).toString(); if (teamId.equals(realization.getTeamId())) { return true; } } return false; } private String getTeamStatus(String teamId) { Team2 team = invokeAndExtractTeamInfo(teamId); return team.getStatus(); } private Realization getCleanRealization() { Realization realization = new Realization(); realization.setExperimentId(0L); realization.setExperimentName(""); realization.setUserId(""); realization.setTeamId(""); realization.setState(RealizationState.ERROR.toString()); realization.setDetails(""); realization.setNumberOfNodes(0); return realization; } /** * Computes the number of teams that the user is in and the number of running experiments to populate data for the user dashboard * * @return a map in the form teams: numberOfTeams, experiments: numberOfExperiments */ private Map<String, Integer> getUserDashboardStats(String userId) { int numberOfRunningExperiments = 0; Map<String, Integer> userDashboardStats = new HashMap<>(); // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(userRespEntity.getBody().toString()); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); int numberOfApprovedTeam = 0; for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = 
teamIdsJsonArray.get(i).toString(); HttpEntity<String> teamRequest = createHttpEntityHeaderOnly(); ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class); String teamResponseBody = teamResponse.getBody().toString(); if (!isMemberJoinRequestPending(userId, teamResponseBody)) { // get experiments lists of the teams HttpEntity<String> expRequest = createHttpEntityHeaderOnly(); ResponseEntity expRespEntity = restTemplate.exchange(properties.getExpListByTeamId(teamId), HttpMethod.GET, expRequest, String.class); JSONArray experimentsArray = new JSONArray(expRespEntity.getBody().toString()); numberOfRunningExperiments = getNumberOfRunningExperiments(numberOfRunningExperiments, experimentsArray); numberOfApprovedTeam ++; } } userDashboardStats.put(USER_DASHBOARD_APPROVED_TEAMS, numberOfApprovedTeam); userDashboardStats.put(USER_DASHBOARD_RUNNING_EXPERIMENTS, numberOfRunningExperiments); // userDashboardStats.put(USER_DASHBOARD_FREE_NODES, getNodes(NodeType.FREE)); return userDashboardStats; } private int getNumberOfRunningExperiments(int numberOfRunningExperiments, JSONArray experimentsArray) { for (int k = 0; k < experimentsArray.length(); k++) { Experiment2 experiment2 = extractExperiment(experimentsArray.getJSONObject(k).toString()); Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId()); if (realization.getState().equals(RealizationState.RUNNING.toString())) { numberOfRunningExperiments++; } } return numberOfRunningExperiments; } private SortedMap<String, Map<String, String>> getGlobalImages() throws IOException { SortedMap<String, Map<String, String>> globalImagesMap = new TreeMap<>(); log.info("Retrieving list of global images from: {}", properties.getGlobalImages()); try { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getGlobalImages(), HttpMethod.GET, request, 
String.class); ObjectMapper mapper = new ObjectMapper(); String json = new JSONObject(response.getBody().toString()).getString("images"); globalImagesMap = mapper.readValue(json, new TypeReference<SortedMap<String, Map<String, String>>>() { }); } catch (RestClientException e) { log.warn("Error connecting to service-image: {}", e); } return globalImagesMap; } private int getNodes(NodeType nodeType) { String nodesCount; log.info("Retrieving number of " + nodeType + " nodes from: {}", properties.getNodes(nodeType)); try { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getNodes(nodeType), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(response.getBody().toString()); nodesCount = object.getString(nodeType.name()); } catch (RestClientException e) { log.warn(ERROR_CONNECTING_TO_SERVICE_TELEMETRY, e); nodesCount = "0"; } return Integer.parseInt(nodesCount); } private List<TeamUsageInfo> getTeamsUsageStatisticsForUser(String userId) { List<TeamUsageInfo> usageInfoList = new ArrayList<>(); // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(userRespEntity.getBody().toString()); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); // get team info by team id for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = teamIdsJsonArray.get(i).toString(); HttpEntity<String> teamRequest = createHttpEntityHeaderOnly(); ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class); String teamResponseBody = teamResponse.getBody().toString(); if (!isMemberJoinRequestPending(userId, teamResponseBody)) { TeamUsageInfo usageInfo = new TeamUsageInfo(); usageInfo.setId(teamId); usageInfo.setName(new 
JSONObject(teamResponseBody).getString("name")); usageInfo.setUsage(getUsageStatisticsByTeamId(teamId)); usageInfoList.add(usageInfo); } } return usageInfoList; } private String getUsageStatisticsByTeamId(String id) { log.info("Getting usage statistics for team {}", id); HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response; try { response = restTemplate.exchange(properties.getUsageStat(id), HttpMethod.GET, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio get usage statistics {}", e); return "?"; } return response.getBody().toString(); } private TeamQuota extractTeamQuotaInfo(String responseBody) { JSONObject object = new JSONObject(responseBody); TeamQuota teamQuota = new TeamQuota(); Double charges = Double.parseDouble(accountingProperties.getCharges()); // amountUsed from SIO will never be null => not checking for null value String usage = object.getString("usage"); // getting usage in String BigDecimal amountUsed = new BigDecimal(usage); // using BigDecimal to handle currency amountUsed = amountUsed.multiply(new BigDecimal(charges)); // usage X charges //quota passed from SIO can be null , so we have to check for null value if (object.has("quota")) { Object budgetObject = object.optString("quota", null); if (budgetObject == null) { teamQuota.setBudget(""); // there is placeholder here teamQuota.setResourcesLeft("Unlimited"); // not placeholder so can pass string over } else { Double budgetInDouble = object.getDouble("quota"); // retrieve budget from SIO in Double BigDecimal budgetInBD = BigDecimal.valueOf(budgetInDouble); // handling currency using BigDecimal // calculate resoucesLeft BigDecimal resourceLeftInBD = budgetInBD.subtract(amountUsed); resourceLeftInBD = resourceLeftInBD.divide(new BigDecimal(charges), 0, BigDecimal.ROUND_DOWN); budgetInBD = budgetInBD.setScale(2, BigDecimal.ROUND_HALF_UP); // set budget teamQuota.setBudget(budgetInBD.toString()); //set resroucesLeft if 
(resourceLeftInBD.compareTo(BigDecimal.valueOf(0)) < 0) teamQuota.setResourcesLeft("0"); else teamQuota.setResourcesLeft(resourceLeftInBD.toString()); } } //set teamId and amountUsed teamQuota.setTeamId(object.getString(TEAM_ID)); amountUsed = amountUsed.setScale(2, BigDecimal.ROUND_HALF_UP); teamQuota.setAmountUsed(amountUsed.toString()); return teamQuota; } /** * Invokes the get nodes status in the telemetry service * @return a map containing a list of nodes status by their type */ private Map<String, List<Map<String, String>>> getNodesStatus() throws IOException { log.info("Getting all nodes' status from: {}", properties.getNodesStatus()); Map<String, List<Map<String, String>>> output = new HashMap<>(); try { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getNodesStatus(), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(response.getBody().toString()); if (object == JSONObject.NULL || object.length() == 0) { return output; } else { // loop through the object as there may be more than one machine type for (int i = 0; i < object.names().length(); i++) { // for each machine type, get all the current nodes status String currentMachineType = object.names().getString(i); // converts the JSON Array of the form [ { id : A, status : B, type : C } ] into a proper list of map List<Map<String, String>> nodesList = objectMapper.readValue(object.getJSONArray(currentMachineType).toString(), new TypeReference<List<Map>>(){}); output.put(currentMachineType, nodesList); } } } catch (RestClientException e) { log.warn(ERROR_CONNECTING_TO_SERVICE_TELEMETRY, e); return new HashMap<>(); } log.info("Finish getting all nodes: {}", output); return output; } private Map<String,String> getTestbedStats() { Map<String, String> statsMap = new HashMap<>(); log.info("Retrieving number of logged in users and running experiments from: {}", properties.getTestbedStats()); try { 
HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getTestbedStats(), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(response.getBody().toString()); statsMap.put(USER_DASHBOARD_LOGGED_IN_USERS_COUNT, object.getString("users")); statsMap.put(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT, object.getString("experiments")); } catch (RestClientException e) { log.warn(ERROR_CONNECTING_TO_SERVICE_TELEMETRY, e); statsMap.put(USER_DASHBOARD_LOGGED_IN_USERS_COUNT, "0"); statsMap.put(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT, "0"); } return statsMap; } }
src/main/java/sg/ncl/MainController.java
package sg.ncl; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.apache.tomcat.util.codec.binary.Base64; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.*; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.*; import org.springframework.web.client.ResourceAccessException; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import org.springframework.web.servlet.support.RequestContextUtils; import sg.ncl.domain.*; import sg.ncl.exceptions.*; import sg.ncl.testbed_interface.*; import sg.ncl.testbed_interface.Image; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.validation.Valid; import javax.validation.constraints.NotNull; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.*; import java.util.List; import java.util.regex.Pattern; import static java.time.temporal.TemporalAdjusters.firstDayOfMonth; import static java.time.temporal.TemporalAdjusters.lastDayOfMonth; import static sg.ncl.domain.ExceptionState.*; /** * * Spring Controller * 
Direct the views to appropriate locations and invoke the respective REST API * * @author Cassie, Desmond, Te Ye, Vu */ @Controller @Slf4j public class MainController { public static final String CONTENT_DISPOSITION = "Content-Disposition"; public static final String APPLICATION_FORCE_DOWNLOAD = "application/force-download"; private static final String SESSION_LOGGED_IN_USER_ID = "loggedInUserId"; private TeamManager teamManager = TeamManager.getInstance(); // private UserManager userManager = UserManager.getInstance(); // private ExperimentManager experimentManager = ExperimentManager.getInstance(); // private DomainManager domainManager = DomainManager.getInstance(); // private DatasetManager datasetManager = DatasetManager.getInstance(); // private NodeManager nodeManager = NodeManager.getInstance(); private static final String CONTACT_EMAIL = "[email protected]"; private static final String UNKNOWN = "?"; private static final String MESSAGE = "message"; private static final String MESSAGE_SUCCESS = "messageSuccess"; private static final String EXPERIMENT_MESSAGE = "exp_message"; private static final String ERROR_PREFIX = "Error: "; // error messages private static final String ERROR_CONNECTING_TO_SERVICE_TELEMETRY = "Error connecting to service-telemetry: {}"; private static final String ERR_SERVER_OVERLOAD = "There is a problem with your request. Please contact " + CONTACT_EMAIL; private static final String CONNECTION_ERROR = "Connection Error"; private final String permissionDeniedMessage = "Permission denied. 
If the error persists, please contact " + CONTACT_EMAIL; private static final String ERR_START_DATE_AFTER_END_DATE = "End date must be after start date"; // for user dashboard hashmap key values private static final String USER_DASHBOARD_APPROVED_TEAMS = "numberOfApprovedTeam"; private static final String USER_DASHBOARD_RUNNING_EXPERIMENTS = "numberOfRunningExperiments"; private static final String USER_DASHBOARD_FREE_NODES = "freeNodes"; private static final String USER_DASHBOARD_TOTAL_NODES = "totalNodes"; private static final String USER_DASHBOARD_GLOBAL_IMAGES = "globalImagesMap"; private static final String USER_DASHBOARD_LOGGED_IN_USERS_COUNT = "loggedInUsersCount"; private static final String USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT = "runningExperimentsCount"; private static final String DETER_UID = "deterUid"; private static final Pattern VALID_EMAIL_ADDRESS_REGEX = Pattern.compile("(?:(?:\\r\\n)?[ \\t])*(?:(?:(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*\\<(?:(?:\\r\\n)?[ \\t])*(?:@(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ 
\\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*(?:,@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*)*:(?:(?:\\r\\n)?[ \\t])*)?(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*\\>(?:(?:\\r\\n)?[ \\t])*)|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*:(?:(?:\\r\\n)?[ \\t])*(?:(?:(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ 
\\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*\\<(?:(?:\\r\\n)?[ \\t])*(?:@(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*(?:,@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*)*:(?:(?:\\r\\n)?[ \\t])*)?(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ 
\\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*\\>(?:(?:\\r\\n)?[ \\t])*)(?:,\\s*(?:(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*|(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)*\\<(?:(?:\\r\\n)?[ \\t])*(?:@(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*(?:,@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ 
\\t])*))*)*:(?:(?:\\r\\n)?[ \\t])*)?(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\"(?:[^\\\"\\r\\\\]|\\\\.|(?:(?:\\r\\n)?[ \\t]))*\"(?:(?:\\r\\n)?[ \\t])*))*@(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*)(?:\\.(?:(?:\\r\\n)?[ \\t])*(?:[^()<>@,;:\\\\\".\\[\\] \\000-\\031]+(?:(?:(?:\\r\\n)?[ \\t])+|\\Z|(?=[\\[\"()<>@,;:\\\\\".\\[\\]]))|\\[([^\\[\\]\\r\\\\]|\\\\.)*\\](?:(?:\\r\\n)?[ \\t])*))*\\>(?:(?:\\r\\n)?[ \\t])*))*)?;\\s*)"); private static final String FORGET_PSWD_PAGE = "password_reset_email"; private static final String FORGET_PSWD_NEW_PSWD_PAGE = "password_reset_new_password"; private static final String NO_PERMISSION_PAGE = "nopermission"; private static final String EXPERIMENTS = "experiments"; private static final String APPLICATION_DATE = "applicationDate"; private static final String TEAM_NAME = "teamName"; private static final String TEAM_ID = "teamId"; private static final String NODE_ID = "nodeId"; private static final String PERMISSION_DENIED = "Permission denied"; private static final String TEAM_NOT_FOUND = "Team not found"; private static final String NOT_FOUND = " not found."; private static final String EDIT_BUDGET = "editBudget"; private static final String ORIGINAL_BUDGET = "originalBudget"; private static final String REDIRECT_TEAM_PROFILE_TEAM_ID = "redirect:/team_profile/{teamId}"; private static final String REDIRECT_TEAM_PROFILE = "redirect:/team_profile/"; private static final String REDIRECT_INDEX_PAGE = "redirect:/"; private static final String REDIRECT_ENERGY_USAGE = "redirect:/energy_usage"; // remove members from team profile; to display the 
// list of experiments created by user
private static final String REMOVE_MEMBER_UID = "removeMemberUid";
private static final String REMOVE_MEMBER_NAME = "removeMemberName";
private static final String MEMBER_TYPE = "memberType";
// admin update data resource to track what fields have been updated
private static final String ORIGINAL_DATARESOURCE = "original_dataresource";
private static final String NOT_APPLICABLE = "N.A.";

// REST client used for every call to the backend SIO/telemetry services
@Autowired
protected RestTemplate restTemplate;

@Inject
protected ObjectMapper objectMapper;

// endpoint URLs for the backend services
@Inject
protected ConnectionProperties properties;

@Inject
protected WebProperties webProperties;

@Inject
protected AccountingProperties accountingProperties;

@Inject
protected HttpSession httpScopedSession;

// ---- static view mappings: each handler simply resolves to a template name ----

@RequestMapping("/")
public String index() {
    return "index";
}

@RequestMapping("/overview")
public String overview() {
    return "overview";
}

@RequestMapping("/community")
public String community() {
    return "community";
}

@RequestMapping("/about")
public String about() {
    return "about";
}

@RequestMapping("/event")
public String event() {
    return "event";
}

@RequestMapping("/plan")
public String plan() {
    return "plan";
}

@RequestMapping("/career")
public String career() {
    return "career";
}

@RequestMapping("/pricing")
public String pricing() {
    return "pricing";
}

@RequestMapping("/resources")
public String resources() {
    return "resources";
}

@RequestMapping("/research")
public String research() {
    return "research";
}

@RequestMapping("/calendar")
public String calendar() {
    return "calendar";
}

@RequestMapping("/tutorials/createaccount")
public String createAccount() {
    return "createaccount";
}

@RequestMapping("/tutorials/createexperiment")
public String createExperimentTutorial() {
    return "createexperiment";
}

@RequestMapping("/tutorials/loadimage")
public String loadimage() {
    return "loadimage";
}

@RequestMapping("/tutorials/saveimage")
public String saveimage() {
    return "saveimage";
}

@RequestMapping("/tutorials/applyteam")
public String applyteam() {
    return "applyteam";
}

@RequestMapping("/tutorials/jointeam")
public String jointeam() {
    return "jointeam";
}

@RequestMapping("/tutorials/usenode")
public String usenode() {
    return "usenode";
}

@RequestMapping("/tutorials/usessh")
public String usessh() {
    return "usessh";
}

@RequestMapping("/tutorials/usescp")
public String usescp() {
    return "usescp";
}

@RequestMapping("/tutorials/usegui")
public String usegui() {
    return "usegui";
}

@RequestMapping("/tutorials/manageresource")
public String manageresource() {
    return "manageresource";
}

@RequestMapping("/tutorials/testbedinfo")
public String testbedinfo() {
    return "testbedinfo";
}

@RequestMapping("/tutorials/createcustom")
public String createcustom() {
    return "createcustom";
}

@RequestMapping("/error_openstack")
public String error_openstack() {
    return "error_openstack";
}

@RequestMapping("/accessexperiment")
public String accessexperiment() {
    return "accessexperiment";
}

@RequestMapping("/resource2")
public String resource2() {
    return "resource2";
}

@RequestMapping("/tutorials")
public String tutorials() {
    return "tutorials";
}

@RequestMapping("/maintainance")
public String maintainance() {
    return "maintainance";
}

/**
 * Testbed-information page: populates the model with the global images map.
 */
@RequestMapping("/testbedInformation")
public String testbedInformation(Model model) throws IOException {
    model.addAttribute(USER_DASHBOARD_GLOBAL_IMAGES, getGlobalImages());
    return "testbed_information";
}

// get all the nodes' status
// there are four possible status values:
// "free"     : node is free
// "in_use"   : node is in use
// "reload"   : node is in process of freeing or unknown status
// "reserved" : node is pre-reserved for a project
@RequestMapping("/testbedNodesStatus")
public String testbedNodesStatus(Model model) throws IOException {
    // get number of active users and running experiments
    Map<String, String> testbedStatsMap = getTestbedStats();
    // running totals, accumulated per machine type below
    testbedStatsMap.put(USER_DASHBOARD_FREE_NODES, "0");
    testbedStatsMap.put(USER_DASHBOARD_TOTAL_NODES, "0");
    Map<String, List<Map<String, String>>> nodesStatus =
getNodesStatus();
    Map<String, Map<String, Long>> nodesStatusCount = new HashMap<>();
    // loop through each of the machine type
    // tabulate the different nodes type
    // count the number of different nodes status, e.g. SYSTEMX = { FREE = 10, IN_USE = 11, ... }
    nodesStatus.entrySet().forEach(machineTypeListEntry -> {
        Map<String, Long> nodesCountMap = new HashMap<>();
        long free = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "free".equalsIgnoreCase(stringStringMap.get("status"))).count();
        long inUse = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "in_use".equalsIgnoreCase(stringStringMap.get("status"))).count();
        long reserved = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "reserved".equalsIgnoreCase(stringStringMap.get("status"))).count();
        long reload = machineTypeListEntry.getValue().stream().filter(stringStringMap -> "reload".equalsIgnoreCase(stringStringMap.get("status"))).count();
        long total = free + inUse + reserved + reload;
        // accumulate the overall totals across machine types (stored as strings in the stats map)
        long currentTotal = Long.parseLong(testbedStatsMap.get(USER_DASHBOARD_TOTAL_NODES)) + total;
        long currentFree = Long.parseLong(testbedStatsMap.get(USER_DASHBOARD_FREE_NODES)) + free;
        nodesCountMap.put(NodeType.FREE.name(), free);
        nodesCountMap.put(NodeType.IN_USE.name(), inUse);
        nodesCountMap.put(NodeType.RESERVED.name(), reserved);
        nodesCountMap.put(NodeType.RELOADING.name(), reload);
        nodesStatusCount.put(machineTypeListEntry.getKey(), nodesCountMap);
        testbedStatsMap.put(USER_DASHBOARD_FREE_NODES, Long.toString(currentFree));
        testbedStatsMap.put(USER_DASHBOARD_TOTAL_NODES, Long.toString(currentTotal));
    });
    // expose per-node detail, per-type counts, and overall totals to the view
    model.addAttribute("nodesStatus", nodesStatus);
    model.addAttribute("nodesStatusCount", nodesStatusCount);
    model.addAttribute(USER_DASHBOARD_LOGGED_IN_USERS_COUNT, testbedStatsMap.get(USER_DASHBOARD_LOGGED_IN_USERS_COUNT));
    model.addAttribute(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT, testbedStatsMap.get(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT));
    model.addAttribute(USER_DASHBOARD_FREE_NODES, testbedStatsMap.get(USER_DASHBOARD_FREE_NODES));
    model.addAttribute(USER_DASHBOARD_TOTAL_NODES, testbedStatsMap.get(USER_DASHBOARD_TOTAL_NODES));
    return "testbed_nodes_status";
}

/**
 * Streams the bundled order-form PDF to the client as a forced download.
 *
 * @throws OrderFormDownloadException if copying the PDF to the response fails
 */
@RequestMapping(value = "/orderform/download", method = RequestMethod.GET)
public void OrderForm_v1Download(HttpServletResponse response) throws OrderFormDownloadException, IOException {
    InputStream stream = null;
    response.setContentType(MediaType.APPLICATION_PDF_VALUE);
    try {
        // PDF is packaged on the classpath under downloads/
        stream = getClass().getClassLoader().getResourceAsStream("downloads/order_form.pdf");
        response.setContentType(APPLICATION_FORCE_DOWNLOAD);
        response.setHeader(CONTENT_DISPOSITION, "attachment; filename=order_form.pdf");
        IOUtils.copy(stream, response.getOutputStream());
        response.flushBuffer();
    } catch (IOException ex) {
        log.info("Error for download orderform.");
        throw new OrderFormDownloadException("Error for download orderform.");
    } finally {
        if (stream != null) {
            stream.close();
        }
    }
}

@RequestMapping("/contactus")
public String contactus() {
    return "contactus";
}

/**
 * Fallback for not-found errors: logged-in users go to the dashboard,
 * anonymous visitors to the home page.
 */
@RequestMapping("/notfound")
public String redirectNotFound(HttpSession session) {
    if (session.getAttribute("id") != null && !session.getAttribute("id").toString().isEmpty()) {
        // user is already logged on and has encountered an error
        // redirect to dashboard
        return "redirect:/dashboard";
    } else {
        // user have not logged on before
        // redirect to home page
        return REDIRECT_INDEX_PAGE;
    }
}

@RequestMapping(value = "/login", method = RequestMethod.GET)
public String login(Model model) {
    model.addAttribute("loginForm", new LoginForm());
    return "login";
}

/**
 * Handles the activation link from the registration email by forwarding the
 * verification key to the SIO registration service.
 */
@RequestMapping(value = "/emailVerification", params = {"id", "email", "key"})
public String verifyEmail(
        @NotNull @RequestParam("id") final String id,
        @NotNull @RequestParam("email") final String emailBase64,
        @NotNull @RequestParam("key") final String key
) throws UnsupportedEncodingException {
    HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
    ObjectNode keyObject = objectMapper.createObjectNode();
    keyObject.put("key", key);
    HttpEntity<String> request = new HttpEntity<>(keyObject.toString(), headers);
    restTemplate.setErrorHandler(new MyResponseErrorHandler());
    // PUT the verification key to /users/{id}/emails/{emailBase64} on the registration service
    final String link = properties.getSioRegUrl() + "/users/" + id + "/emails/" + emailBase64;
    log.info("Activation link: {}, verification key {}", link, key);
    ResponseEntity response = restTemplate.exchange(link, HttpMethod.PUT, request, String.class);
    if (RestUtil.isError(response.getStatusCode())) {
        log.error("Activation of user {} failed.", id);
        return "email_validation_failed";
    } else {
        log.info("Activation of user {} completed.", id);
        return "email_validation_ok";
    }
}

/**
 * Processes the login form: authenticates against the SIO auth service with
 * HTTP Basic credentials, then routes the user according to account status
 * (frozen, unverified, pending, approved, or rejected/closed).
 *
 * @throws WebServiceRuntimeException if the auth service error body cannot be parsed
 */
@RequestMapping(value = "/login", method = RequestMethod.POST)
public String loginSubmit(
        @Valid
        @ModelAttribute("loginForm") LoginForm loginForm,
        BindingResult bindingResult,
        Model model,
        HttpSession session,
        final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {

    if (bindingResult.hasErrors()) {
        loginForm.setErrorMsg("Login failed: Invalid email/password.");
        return "login";
    }

    String inputEmail = loginForm.getLoginEmail();
    String inputPwd = loginForm.getLoginPassword();
    if (inputEmail.trim().isEmpty() || inputPwd.trim().isEmpty()) {
        loginForm.setErrorMsg("Email or Password cannot be empty!");
        return "login";
    }

    // build the HTTP Basic "Authorization" header from email:password
    String plainCreds = inputEmail + ":" + inputPwd;
    byte[] plainCredsBytes = plainCreds.getBytes();
    byte[] base64CredsBytes = Base64.encodeBase64(plainCredsBytes);
    String base64Creds = new String(base64CredsBytes);

    ResponseEntity response;
    HttpHeaders headers = new HttpHeaders();
    headers.set("Authorization", "Basic " + base64Creds);
    HttpEntity<String> request = new HttpEntity<>(headers);
    restTemplate.setErrorHandler(new MyResponseErrorHandler());
    try {
        response = restTemplate.exchange(properties.getSioAuthUrl(), HttpMethod.POST, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio authentication service: {}", e);
        loginForm.setErrorMsg(ERR_SERVER_OVERLOAD);
        return "login";
    }

    String jwtTokenString = response.getBody().toString();
    log.info("token string {}", jwtTokenString);
    if (jwtTokenString == null || jwtTokenString.isEmpty()) {
        log.warn("login failed for {}: unknown response code", loginForm.getLoginEmail());
        loginForm.setErrorMsg("Login failed: Invalid email/password.");
        return "login";
    }

    if (RestUtil.isError(response.getStatusCode())) {
        // auth service returned an error body; map known exception states to user messages
        try {
            MyErrorResource error = objectMapper.readValue(jwtTokenString, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            if (exceptionState == ExceptionState.CREDENTIALS_NOT_FOUND_EXCEPTION) {
                log.warn("login failed for {}: credentials not found", loginForm.getLoginEmail());
                loginForm.setErrorMsg("Login failed: Account does not exist. Please register.");
                return "login";
            }
            log.warn("login failed for {}: {}", loginForm.getLoginEmail(), error.getError());
            loginForm.setErrorMsg("Login failed: Invalid email/password.");
            return "login";
        } catch (IOException ioe) {
            log.warn("IOException {}", ioe);
            throw new WebServiceRuntimeException(ioe.getMessage());
        }
    }

    // success response carries the JWT token, the user id, and the user's roles
    JSONObject tokenObject = new JSONObject(jwtTokenString);
    String token = tokenObject.getString("token");
    String id = tokenObject.getString("id");
    String role = "";
    if (tokenObject.getJSONArray("roles") != null) {
        // only the first role is used for the session
        role = tokenObject.getJSONArray("roles").get(0).toString();
    }
    if (token.trim().isEmpty() || id.trim().isEmpty() || role.trim().isEmpty()) {
        log.warn("login failed for {}: empty id {} or token {} or role {}", loginForm.getLoginEmail(), id, token, role);
        loginForm.setErrorMsg("Login failed: Invalid email/password.");
        return "login";
    }

    // now check user status to decide what to show to the user
    User2 user = invokeAndExtractUserInfo(id);
    try {
        String userStatus = user.getStatus();
        boolean emailVerified = user.getEmailVerified();
        if (UserStatus.FROZEN.toString().equals(userStatus)) {
            log.warn("User {} login failed: account has been frozen", id);
            loginForm.setErrorMsg("Login Failed: Account Frozen. Please contact " + CONTACT_EMAIL);
            return "login";
        } else if (!emailVerified || (UserStatus.CREATED.toString()).equals(userStatus)) {
            redirectAttributes.addAttribute("statuschecklist", userStatus);
            log.info("User {} not validated, redirected to email verification page", id);
            return "redirect:/email_checklist";
        } else if ((UserStatus.PENDING.toString()).equals(userStatus)) {
            redirectAttributes.addAttribute("statuschecklist", userStatus);
            log.info("User {} not approved, redirected to application pending page", id);
            return "redirect:/email_checklist";
        } else if ((UserStatus.APPROVED.toString()).equals(userStatus)) {
            // set session variables
            setSessionVariables(session, loginForm.getLoginEmail(), id, user.getFirstName(), role, token);
            log.info("login success for {}, id: {}", loginForm.getLoginEmail(), id);
            return "redirect:/dashboard";
        } else {
            log.warn("login failed for user {}: account is rejected or closed", id);
            loginForm.setErrorMsg("Login Failed: Account Rejected/Closed.");
            return "login";
        }
    } catch (Exception e) {
        log.warn("Error parsing json object for user: {}", e.getMessage());
        loginForm.setErrorMsg(ERR_SERVER_OVERLOAD);
        return "login";
    }
}

// triggered when user clicks "Forget Password?"
@RequestMapping("/password_reset_email") public String passwordResetEmail(Model model) { model.addAttribute("passwordResetRequestForm", new PasswordResetRequestForm()); return FORGET_PSWD_PAGE; } // triggered when user clicks "Send Reset Email" button @PostMapping("/password_reset_request") public String sendPasswordResetRequest( @ModelAttribute("passwordResetRequestForm") PasswordResetRequestForm passwordResetRequestForm ) throws WebServiceRuntimeException { String email = passwordResetRequestForm.getEmail(); if (!VALID_EMAIL_ADDRESS_REGEX.matcher(email).matches()) { passwordResetRequestForm.setErrMsg("Please provide a valid email address"); return FORGET_PSWD_PAGE; } JSONObject obj = new JSONObject(); obj.put("username", email); log.info("Connecting to sio for password reset email: {}", email); HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); HttpEntity<String> request = new HttpEntity<>(obj.toString(), headers); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = null; try { response = restTemplate.exchange(properties.getPasswordResetRequestURI(), HttpMethod.POST, request, String.class); } catch (RestClientException e) { log.warn("Cannot connect to sio for password reset email: {}", e); passwordResetRequestForm.setErrMsg("Cannot connect. Server may be down!"); return FORGET_PSWD_PAGE; } if (RestUtil.isError(response.getStatusCode())) { log.warn("Server responded error for password reset email: {}", response.getStatusCode()); passwordResetRequestForm.setErrMsg("Email not registered. 
Please use a different email address."); return FORGET_PSWD_PAGE; } log.info("Password reset email sent for {}", email); return "password_reset_email_sent"; } // triggered when user clicks password reset link in the email @RequestMapping(path = "/passwordReset", params = {"key"}) public String passwordResetNewPassword(@NotNull @RequestParam("key") final String key, Model model) { PasswordResetForm form = new PasswordResetForm(); form.setKey(key); model.addAttribute("passwordResetForm", form); // redirect to the page for user to enter new password return FORGET_PSWD_NEW_PSWD_PAGE; } // actual call to sio to reset password @RequestMapping(path = "/password_reset") public String resetPassword(@ModelAttribute("passwordResetForm") PasswordResetForm passwordResetForm) throws IOException { if (!passwordResetForm.isPasswordOk()) { return FORGET_PSWD_NEW_PSWD_PAGE; } JSONObject obj = new JSONObject(); obj.put("key", passwordResetForm.getKey()); obj.put("new", passwordResetForm.getPassword1()); log.info("Connecting to sio for password reset, key = {}", passwordResetForm.getKey()); HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); HttpEntity<String> request = new HttpEntity<>(obj.toString(), headers); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = null; try { response = restTemplate.exchange(properties.getPasswordResetURI(), HttpMethod.PUT, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio for password reset! {}", e); passwordResetForm.setErrMsg("Cannot connect to server! Please try again later."); return FORGET_PSWD_NEW_PSWD_PAGE; } if (RestUtil.isError(response.getStatusCode())) { EnumMap<ExceptionState, String> exceptionMessageMap = new EnumMap<>(ExceptionState.class); exceptionMessageMap.put(PASSWORD_RESET_REQUEST_TIMEOUT_EXCEPTION, "Password reset request timed out. 
Please request a new reset email."); exceptionMessageMap.put(PASSWORD_RESET_REQUEST_NOT_FOUND_EXCEPTION, "Invalid password reset request. Please request a new reset email."); exceptionMessageMap.put(ADAPTER_CONNECTION_EXCEPTION, "Server-side error. Please contact " + CONTACT_EMAIL); MyErrorResource error = objectMapper.readValue(response.getBody().toString(), MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); final String errMsg = exceptionMessageMap.get(exceptionState) == null ? ERR_SERVER_OVERLOAD : exceptionMessageMap.get(exceptionState); passwordResetForm.setErrMsg(errMsg); log.warn("Server responded error for password reset: {}", exceptionState.toString()); return FORGET_PSWD_NEW_PSWD_PAGE; } log.info("Password was reset, key = {}", passwordResetForm.getKey()); return "password_reset_success"; } @RequestMapping("/dashboard") public String dashboard(Model model, HttpSession session) throws WebServiceRuntimeException { HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getDeterUid(session.getAttribute(webProperties.getSessionUserId()).toString()), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { log.error("No user exists : {}", session.getAttribute(webProperties.getSessionUserId())); MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); model.addAttribute(DETER_UID, CONNECTION_ERROR); } else { log.info("Show the deter user id: {}", responseBody); model.addAttribute(DETER_UID, responseBody); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } // retrieve user dashboard stats Map<String, Integer> userDashboardMap = getUserDashboardStats(session.getAttribute(webProperties.getSessionUserId()).toString()); 
List<TeamUsageInfo> usageInfoList = getTeamsUsageStatisticsForUser(session.getAttribute(webProperties.getSessionUserId()).toString()); model.addAttribute("userDashboardMap", userDashboardMap); model.addAttribute("usageInfoList", usageInfoList); return "dashboard"; } @RequestMapping(value = "/logout", method = RequestMethod.GET) public String logout(HttpSession session) { removeSessionVariables(session); return REDIRECT_INDEX_PAGE; } //--------------------------Sign Up Page-------------------------- @RequestMapping(value = "/signup2", method = RequestMethod.GET) public String signup2(Model model, HttpServletRequest request) { Map<String, ?> inputFlashMap = RequestContextUtils.getInputFlashMap(request); if (inputFlashMap != null) { log.debug((String) inputFlashMap.get(MESSAGE)); model.addAttribute("signUpMergedForm", (SignUpMergedForm) inputFlashMap.get("signUpMergedForm")); } else { log.debug("InputFlashMap is null"); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); } return "signup2"; } @RequestMapping(value = "/signup2", method = RequestMethod.POST) public String validateDetails( @Valid @ModelAttribute("signUpMergedForm") SignUpMergedForm signUpMergedForm, BindingResult bindingResult, final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException { if (bindingResult.hasErrors() || signUpMergedForm.getIsValid() == false) { log.warn("Register form has errors {}", signUpMergedForm.toString()); return "signup2"; } if (!signUpMergedForm.getHasAcceptTeamOwnerPolicy()) { signUpMergedForm.setErrorTeamOwnerPolicy("Please accept the team owner policy"); log.warn("Policy not accepted"); return "signup2"; } // get form fields // craft the registration json JSONObject mainObject = new JSONObject(); JSONObject credentialsFields = new JSONObject(); credentialsFields.put("username", signUpMergedForm.getEmail().trim()); credentialsFields.put("password", signUpMergedForm.getPassword()); // create the user JSON JSONObject userFields = new JSONObject(); 
JSONObject userDetails = new JSONObject(); JSONObject addressDetails = new JSONObject(); userDetails.put("firstName", signUpMergedForm.getFirstName().trim()); userDetails.put("lastName", signUpMergedForm.getLastName().trim()); userDetails.put("jobTitle", signUpMergedForm.getJobTitle().trim()); userDetails.put("email", signUpMergedForm.getEmail().trim()); userDetails.put("phone", signUpMergedForm.getPhone().trim()); userDetails.put("institution", signUpMergedForm.getInstitution().trim()); userDetails.put("institutionAbbreviation", signUpMergedForm.getInstitutionAbbreviation().trim()); userDetails.put("institutionWeb", signUpMergedForm.getWebsite().trim()); userDetails.put("address", addressDetails); addressDetails.put("address1", signUpMergedForm.getAddress1().trim()); addressDetails.put("address2", signUpMergedForm.getAddress2().trim()); addressDetails.put("country", signUpMergedForm.getCountry().trim()); addressDetails.put("region", signUpMergedForm.getProvince().trim()); addressDetails.put("city", signUpMergedForm.getCity().trim()); addressDetails.put("zipCode", signUpMergedForm.getPostalCode().trim()); userFields.put("userDetails", userDetails); userFields.put(APPLICATION_DATE, ZonedDateTime.now()); JSONObject teamFields = new JSONObject(); // add all to main json mainObject.put("credentials", credentialsFields); mainObject.put("user", userFields); mainObject.put("team", teamFields); // check if user chose create new team or join existing team by checking team name String createNewTeamName = signUpMergedForm.getTeamName().trim(); String joinNewTeamName = signUpMergedForm.getJoinTeamName().trim(); if (createNewTeamName != null && !createNewTeamName.isEmpty()) { log.info("Signup new team name {}", createNewTeamName); boolean errorsFound = false; if (createNewTeamName.length() < 2 || createNewTeamName.length() > 12) { errorsFound = true; signUpMergedForm.setErrorTeamName("Team name must be 2 to 12 alphabetic/numeric characters"); } if 
(signUpMergedForm.getTeamDescription() == null || signUpMergedForm.getTeamDescription().isEmpty()) { errorsFound = true; signUpMergedForm.setErrorTeamDescription("Team description cannot be empty"); } if (signUpMergedForm.getTeamWebsite() == null || signUpMergedForm.getTeamWebsite().isEmpty()) { errorsFound = true; signUpMergedForm.setErrorTeamWebsite("Team website cannot be empty"); } if (errorsFound) { log.warn("Signup new team error {}", signUpMergedForm.toString()); // clear join team name first before submitting the form signUpMergedForm.setJoinTeamName(null); return "signup2"; } else { teamFields.put("name", signUpMergedForm.getTeamName().trim()); teamFields.put("description", signUpMergedForm.getTeamDescription().trim()); teamFields.put("website", signUpMergedForm.getTeamWebsite().trim()); teamFields.put("organisationType", signUpMergedForm.getTeamOrganizationType()); teamFields.put("visibility", signUpMergedForm.getIsPublic()); mainObject.put("isJoinTeam", false); try { registerUserToDeter(mainObject); } catch ( TeamNotFoundException | TeamNameAlreadyExistsException | UsernameAlreadyExistsException | EmailAlreadyExistsException | InvalidTeamNameException | InvalidPasswordException | DeterLabOperationFailedException e) { redirectAttributes.addFlashAttribute(MESSAGE, e.getMessage()); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } catch (Exception e) { redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } log.info("Signup new team success"); return "redirect:/team_application_submitted"; } } else if (joinNewTeamName != null && !joinNewTeamName.isEmpty()) { log.info("Signup join team name {}", joinNewTeamName); // get the team JSON from team name Team2 joinTeamInfo; try { joinTeamInfo = getTeamIdByName(signUpMergedForm.getJoinTeamName().trim()); } catch (TeamNotFoundException | 
AdapterConnectionException e) { redirectAttributes.addFlashAttribute(MESSAGE, e.getMessage()); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } teamFields.put("id", joinTeamInfo.getId()); // set the flag to indicate to controller that it is joining an existing team mainObject.put("isJoinTeam", true); try { registerUserToDeter(mainObject); } catch ( TeamNotFoundException | AdapterConnectionException | TeamNameAlreadyExistsException | UsernameAlreadyExistsException | EmailAlreadyExistsException | InvalidTeamNameException | InvalidPasswordException | DeterLabOperationFailedException e) { redirectAttributes.addFlashAttribute(MESSAGE, e.getMessage()); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } catch (Exception e) { redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } log.info("Signup join team success"); log.info("jointeam info: {}", joinTeamInfo); redirectAttributes.addFlashAttribute("team", joinTeamInfo); return "redirect:/join_application_submitted"; } else { log.warn("Signup unreachable statement"); // logic error not suppose to reach here // possible if user fill up create new team but without the team name redirectAttributes.addFlashAttribute("signupError", "There is a problem when submitting your form. 
Please re-enter and submit the details again."); redirectAttributes.addFlashAttribute("signUpMergedForm", signUpMergedForm); return "redirect:/signup2"; } } /** * Use when registering new accounts * * @param mainObject A JSONObject that contains user's credentials, personal details and team application details */ private void registerUserToDeter(JSONObject mainObject) throws WebServiceRuntimeException, TeamNotFoundException, AdapterConnectionException, TeamNameAlreadyExistsException, UsernameAlreadyExistsException, EmailAlreadyExistsException, InvalidTeamNameException, InvalidPasswordException, DeterLabOperationFailedException { HttpEntity<String> request = createHttpEntityWithBodyNoAuthHeader(mainObject.toString()); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getSioRegUrl(), HttpMethod.POST, request, String.class); String responseBody = response.getBody().toString(); log.info("Register user to deter response: {}", responseBody); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); log.warn("Register user exception error: {}", error.getError()); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Register new user failed on DeterLab: {}", error.getMessage()); throw new DeterLabOperationFailedException(ERROR_PREFIX + (error.getMessage().contains("unknown error") ? 
ERR_SERVER_OVERLOAD : error.getMessage())); case TEAM_NAME_ALREADY_EXISTS_EXCEPTION: log.warn("Register new users new team request : team name already exists"); throw new TeamNameAlreadyExistsException("Team name already exists"); case INVALID_TEAM_NAME_EXCEPTION: log.warn("Register new users new team request : team name invalid"); throw new InvalidTeamNameException("Invalid team name: must be 6-12 alphanumeric characters only"); case INVALID_PASSWORD_EXCEPTION: log.warn("Register new users new team request : invalid password"); throw new InvalidPasswordException("Password is too simple"); case USERNAME_ALREADY_EXISTS_EXCEPTION: // throw from user service { String email = mainObject.getJSONObject("user").getJSONObject("userDetails").getString("email"); log.warn("Register new users : email already exists: {}", email); throw new UsernameAlreadyExistsException(ERROR_PREFIX + email + " already in use."); } case EMAIL_ALREADY_EXISTS_EXCEPTION: // throw from adapter deterlab { String email = mainObject.getJSONObject("user").getJSONObject("userDetails").getString("email"); log.warn("Register new users : email already exists: {}", email); throw new EmailAlreadyExistsException(ERROR_PREFIX + email + " already in use."); } default: log.warn("Registration or adapter connection fail"); // possible sio or adapter connection fail throw new AdapterConnectionException(ERR_SERVER_OVERLOAD); } } else { // do nothing log.info("Not an error for status code: {}", response.getStatusCode()); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } /** * Use when users register a new account for joining existing team * * @param teamName The team name to join * @return the team id from sio */ private Team2 getTeamIdByName(String teamName) throws WebServiceRuntimeException, TeamNotFoundException, AdapterConnectionException { // FIXME check if team name exists // FIXME check for general exception? 
HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getTeamByName(teamName), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); if (exceptionState == ExceptionState.TEAM_NOT_FOUND_EXCEPTION) { log.warn("Get team by name : team name error"); throw new TeamNotFoundException("Team name " + teamName + " does not exists"); } else { log.warn("Team service or adapter connection fail"); // possible sio or adapter connection fail throw new AdapterConnectionException(ERR_SERVER_OVERLOAD); } } else { return extractTeamInfo(responseBody); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } //--------------------------Account Settings Page-------------------------- @RequestMapping(value = "/account_settings", method = RequestMethod.GET) public String accountDetails(Model model, HttpSession session) throws WebServiceRuntimeException { String userId_uri = properties.getSioUsersUrl() + session.getAttribute("id"); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(userId_uri, HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { log.error("No user to edit : {}", session.getAttribute("id")); MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); throw new RestClientException("[" + error.getError() + "] "); } else { User2 user2 = extractUserInfo(responseBody); // need to do this so that we can compare after 
submitting the form session.setAttribute(webProperties.getSessionUserAccount(), user2); model.addAttribute("editUser", user2); return "account_settings"; } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } @RequestMapping(value = "/account_settings", method = RequestMethod.POST) public String editAccountDetails( @ModelAttribute("editUser") User2 editUser, final RedirectAttributes redirectAttributes, HttpSession session) throws WebServiceRuntimeException { boolean errorsFound = false; String editPhrase = "editPhrase"; // check fields first if (errorsFound == false && editUser.getFirstName().isEmpty()) { redirectAttributes.addFlashAttribute("editFirstName", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getLastName().isEmpty()) { redirectAttributes.addFlashAttribute("editLastName", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getPhone().isEmpty()) { redirectAttributes.addFlashAttribute("editPhone", "fail"); errorsFound = true; } if (errorsFound == false && (editUser.getPhone().matches("(.*)[a-zA-Z](.*)") || editUser.getPhone().length() < 6)) { // previously already check if phone is empty // now check phone must contain only digits redirectAttributes.addFlashAttribute("editPhone", "fail"); errorsFound = true; } if (errorsFound == false && !editUser.getConfirmPassword().isEmpty() && !editUser.isPasswordValid()) { redirectAttributes.addFlashAttribute(editPhrase, "invalid"); errorsFound = true; } if (errorsFound == false && editUser.getJobTitle().isEmpty()) { redirectAttributes.addFlashAttribute("editJobTitle", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getInstitution().isEmpty()) { redirectAttributes.addFlashAttribute("editInstitution", "fail"); errorsFound = true; } if (errorsFound == false && editUser.getCountry().isEmpty()) { redirectAttributes.addFlashAttribute("editCountry", "fail"); errorsFound = true; } if (errorsFound) { 
session.removeAttribute(webProperties.getSessionUserAccount()); return "redirect:/account_settings"; } else { // used to compare original and edited User2 objects User2 originalUser = (User2) session.getAttribute(webProperties.getSessionUserAccount()); JSONObject userObject = new JSONObject(); JSONObject userDetails = new JSONObject(); JSONObject address = new JSONObject(); userDetails.put("firstName", editUser.getFirstName()); userDetails.put("lastName", editUser.getLastName()); userDetails.put("email", editUser.getEmail()); userDetails.put("phone", editUser.getPhone()); userDetails.put("jobTitle", editUser.getJobTitle()); userDetails.put("address", address); userDetails.put("institution", editUser.getInstitution()); userDetails.put("institutionAbbreviation", originalUser.getInstitutionAbbreviation()); userDetails.put("institutionWeb", originalUser.getInstitutionWeb()); address.put("address1", originalUser.getAddress1()); address.put("address2", originalUser.getAddress2()); address.put("country", editUser.getCountry()); address.put("city", originalUser.getCity()); address.put("region", originalUser.getRegion()); address.put("zipCode", originalUser.getPostalCode()); userObject.put("userDetails", userDetails); String userId_uri = properties.getSioUsersUrl() + session.getAttribute(webProperties.getSessionUserId()); HttpEntity<String> request = createHttpEntityWithBody(userObject.toString()); restTemplate.exchange(userId_uri, HttpMethod.PUT, request, String.class); if (!originalUser.getFirstName().equals(editUser.getFirstName())) { redirectAttributes.addFlashAttribute("editFirstName", "success"); } if (!originalUser.getLastName().equals(editUser.getLastName())) { redirectAttributes.addFlashAttribute("editLastName", "success"); } if (!originalUser.getPhone().equals(editUser.getPhone())) { redirectAttributes.addFlashAttribute("editPhone", "success"); } if (!originalUser.getJobTitle().equals(editUser.getJobTitle())) { redirectAttributes.addFlashAttribute("editJobTitle", 
"success"); } if (!originalUser.getInstitution().equals(editUser.getInstitution())) { redirectAttributes.addFlashAttribute("editInstitution", "success"); } if (!originalUser.getCountry().equals(editUser.getCountry())) { redirectAttributes.addFlashAttribute("editCountry", "success"); } // credential service change password if (editUser.isPasswordMatch()) { JSONObject credObject = new JSONObject(); credObject.put("password", editUser.getPassword()); HttpEntity<String> credRequest = createHttpEntityWithBody(credObject.toString()); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getUpdateCredentials(session.getAttribute("id").toString()), HttpMethod.PUT, credRequest, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); redirectAttributes.addFlashAttribute(editPhrase, "fail"); } else { redirectAttributes.addFlashAttribute(editPhrase, "success"); } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } finally { session.removeAttribute(webProperties.getSessionUserAccount()); } } } return "redirect:/account_settings"; } //--------------------User Side Approve Members Page------------ @RequestMapping("/approve_new_user") public String approveNewUser(Model model, HttpSession session) throws Exception { // HashMap<Integer, Team> rv = new HashMap<Integer, Team>(); // rv = teamManager.getTeamMapByTeamOwner(getSessionIdOfLoggedInUser(session)); // boolean userHasAnyJoinRequest = hasAnyJoinRequest(rv); // model.addAttribute("teamMapOwnedByUser", rv); // model.addAttribute("userHasAnyJoinRequest", userHasAnyJoinRequest); List<JoinRequestApproval> rv = new ArrayList<>(); List<JoinRequestApproval> temp; // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = 
restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); JSONObject object = new JSONObject(responseBody); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = teamIdsJsonArray.get(i).toString(); HttpEntity<String> teamRequest = createHttpEntityHeaderOnly(); ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class); String teamResponseBody = teamResponse.getBody().toString(); Team2 team2 = new Team2(); JSONObject teamObject = new JSONObject(teamResponseBody); JSONArray membersArray = teamObject.getJSONArray("members"); team2.setId(teamObject.getString("id")); team2.setName(teamObject.getString("name")); boolean isTeamLeader = false; temp = new ArrayList<>(); for (int j = 0; j < membersArray.length(); j++) { JSONObject memberObject = membersArray.getJSONObject(j); String userId = memberObject.getString("userId"); String teamMemberType = memberObject.getString(MEMBER_TYPE); String teamMemberStatus = memberObject.getString("memberStatus"); String teamJoinedDate = formatZonedDateTime(memberObject.get("joinedDate").toString()); JoinRequestApproval joinRequestApproval = new JoinRequestApproval(); if (userId.equals(session.getAttribute("id").toString()) && teamMemberType.equals(MemberType.OWNER.toString())) { isTeamLeader = true; } if (teamMemberStatus.equals(MemberStatus.PENDING.toString()) && teamMemberType.equals(MemberType.MEMBER.toString())) { User2 myUser = invokeAndExtractUserInfo(userId); joinRequestApproval.setUserId(myUser.getId()); joinRequestApproval.setUserEmail(myUser.getEmail()); joinRequestApproval.setUserName(myUser.getFirstName() + " " + myUser.getLastName()); joinRequestApproval.setApplicationDate(teamJoinedDate); joinRequestApproval.setTeamId(team2.getId()); 
joinRequestApproval.setTeamName(team2.getName()); joinRequestApproval.setVerified(myUser.getEmailVerified()); temp.add(joinRequestApproval); log.info("Join request: UserId: {}, UserEmail: {}", myUser.getId(), myUser.getEmail()); } } if (isTeamLeader) { if (!temp.isEmpty()) { rv.addAll(temp); } } } model.addAttribute("joinApprovalList", rv); return "approve_new_user"; } @RequestMapping("/approve_new_user/accept/{teamId}/{userId}") public String userSideAcceptJoinRequest( @PathVariable String teamId, @PathVariable String userId, HttpSession session, RedirectAttributes redirectAttributes) throws WebServiceRuntimeException { log.info("Approve join request: User {}, Team {}, Approver {}", userId, teamId, session.getAttribute("id").toString()); JSONObject mainObject = new JSONObject(); JSONObject userFields = new JSONObject(); userFields.put("id", session.getAttribute("id").toString()); mainObject.put("user", userFields); HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString()); ResponseEntity response; try { response = restTemplate.exchange(properties.getApproveJoinRequest(teamId, userId), HttpMethod.POST, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio team service: {}", e); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/approve_new_user"; } String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { try { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case EMAIL_NOT_VERIFIED_EXCEPTION: log.warn("Approve join request: User {} email not verified", userId); redirectAttributes.addFlashAttribute(MESSAGE, "User email has not been verified"); break; case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Approve join request: User {}, Team {} fail", userId, teamId); 
redirectAttributes.addFlashAttribute(MESSAGE, "Approve join request fail");
                    break;
                default:
                    log.warn("Server side error: {}", error.getError());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/approve_new_user";
        } catch (IOException ioe) {
            // NOTE(review): logging the exception via "{}" prints toString() only;
            // consider log.warn("IOException", ioe) to keep the stack trace.
            log.warn("IOException {}", ioe);
            throw new WebServiceRuntimeException(ioe.getMessage());
        }
    }
    // everything looks OK?
    log.info("Join request has been APPROVED, User {}, Team {}", userId, teamId);
    redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Join request has been APPROVED.");
    return "redirect:/approve_new_user";
}

/**
 * Rejects a pending join request on behalf of the currently logged-in approver.
 * Sends a DELETE to the sio team service; on service-side failure maps the error
 * into a flash message. Always redirects back to the approval page.
 *
 * @param teamId id of the team whose join request is rejected
 * @param userId id of the user whose join request is rejected
 * @throws WebServiceRuntimeException if the error response cannot be parsed
 */
@RequestMapping("/approve_new_user/reject/{teamId}/{userId}")
public String userSideRejectJoinRequest(
        @PathVariable String teamId,
        @PathVariable String userId,
        HttpSession session,
        RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    log.info("Reject join request: User {}, Team {}, Approver {}", userId, teamId, session.getAttribute("id").toString());

    // request body carries only the approver's id
    JSONObject mainObject = new JSONObject();
    JSONObject userFields = new JSONObject();
    userFields.put("id", session.getAttribute("id").toString());
    mainObject.put("user", userFields);

    HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.getRejectJoinRequest(teamId, userId), HttpMethod.DELETE, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio team service: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return "redirect:/approve_new_user";
    }
    String responseBody = response.getBody().toString();
    if (RestUtil.isError(response.getStatusCode())) {
        try {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case DETERLAB_OPERATION_FAILED_EXCEPTION:
                    log.warn("Reject join request: User {}, Team {} fail", userId, teamId);
                    redirectAttributes.addFlashAttribute(MESSAGE, "Reject join request fail");
                    break;
                default:
                    log.warn("Server side error: {}", error.getError());
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            return "redirect:/approve_new_user";
        } catch (IOException ioe) {
            log.warn("IOException {}", ioe);
            throw new WebServiceRuntimeException(ioe.getMessage());
        }
    }
    // everything looks OK?
    log.info("Join request has been REJECTED, User {}, Team {}", userId, teamId);
    redirectAttributes.addFlashAttribute(MESSAGE, "Join request has been REJECTED.");
    return "redirect:/approve_new_user";
}

//--------------------------Teams Page--------------------------

/**
 * Pre-login public teams listing. Fetches all teams with PUBLIC visibility
 * (no auth header) and exposes them to the view as "publicTeamMap2".
 */
@RequestMapping("/public_teams")
public String publicTeamsBeforeLogin(Model model) {
    TeamManager2 teamManager2 = new TeamManager2();
    // get public teams
    HttpEntity<String> teamRequest = createHttpEntityHeaderOnlyNoAuthHeader();
    ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamsByVisibility(TeamVisibility.PUBLIC.toString()), HttpMethod.GET, teamRequest, String.class);
    String teamResponseBody = teamResponse.getBody().toString();
    JSONArray teamPublicJsonArray = new JSONArray(teamResponseBody);
    for (int i = 0; i < teamPublicJsonArray.length(); i++) {
        JSONObject teamInfoObject = teamPublicJsonArray.getJSONObject(i);
        Team2 team2 = extractTeamInfo(teamInfoObject.toString());
        teamManager2.addTeamToPublicTeamMap(team2);
    }
    model.addAttribute("publicTeamMap2", teamManager2.getPublicTeamMap());
    return "public_teams";
}

/**
 * Logged-in user's teams page: splits the user's teams into approved teams
 * (with their saved-image lists) and teams where the user's join request is
 * still pending.
 */
@RequestMapping("/teams")
public String teams(Model model, HttpSession session) {
    // int currentLoggedInUserId = getSessionIdOfLoggedInUser(session);
    // model.addAttribute("infoMsg", teamManager.getInfoMsg());
    // model.addAttribute("currentLoggedInUserId", currentLoggedInUserId);
    // model.addAttribute("teamMap", teamManager.getTeamMap(currentLoggedInUserId));
    // model.addAttribute("publicTeamMap", teamManager.getPublicTeamMap());
    // model.addAttribute("invitedToParticipateMap2", teamManager.getInvitedToParticipateMap2(currentLoggedInUserId));
    // model.addAttribute("joinRequestMap2", teamManager.getJoinRequestTeamMap2(currentLoggedInUserId));
    TeamManager2 teamManager2 = new TeamManager2();
    // stores the list of images created or in progress of creation by teams
    // e.g. teamNameA : "created" : [imageA, imageB], "inProgress" : [imageC, imageD]
    Map<String, Map<String, List<Image>>> imageMap = new HashMap<>();
    // get list of teamids
    String userId = session.getAttribute("id").toString();
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();
    JSONObject object = new JSONObject(responseBody);
    JSONArray teamIdsJsonArray = object.getJSONArray("teams");
    String userEmail = object.getJSONObject("userDetails").getString("email");
    for (int i = 0; i < teamIdsJsonArray.length(); i++) {
        String teamId = teamIdsJsonArray.get(i).toString();
        HttpEntity<String> teamRequest = createHttpEntityHeaderOnly();
        ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class);
        String teamResponseBody = teamResponse.getBody().toString();
        //Tran: check if team is approved for userId
        Team2 joinRequestTeam = extractTeamInfoUserJoinRequest(userId, teamResponseBody);
        if (joinRequestTeam != null) {
            // non-null means the user's membership is still a pending join request
            teamManager2.addTeamToUserJoinRequestTeamMap(joinRequestTeam);
        } else {
            Team2 team2 = extractTeamInfo(teamResponseBody);
            teamManager2.addTeamToTeamMap(team2);
            imageMap.put(team2.getName(), invokeAndGetImageList(teamId)); //Tran : only retrieve images of approved teams
        }
    }
    // check if inner image map is empty, have to do it via this manner
    // returns true if the team contains an image list
    boolean isInnerImageMapPresent = imageMap.values().stream().filter(perTeamImageMap -> !perTeamImageMap.isEmpty()).findFirst().isPresent();
    model.addAttribute("userEmail", userEmail);
    model.addAttribute("teamMap2", teamManager2.getTeamMap());
    model.addAttribute("userJoinRequestMap", teamManager2.getUserJoinRequestMap());
    model.addAttribute("isInnerImageMapPresent", isInnerImageMapPresent);
    model.addAttribute("imageMap", imageMap);
    return "teams";
}

/**
 * Executes the service-image call and returns a Map containing the list of images
 * in two partitions.
 * One partition ("created") contains the list of already created images.
 * The other partition ("inProgress") contains the list of currently saving in
 * progress images (service reports their status as "notfound").
 *
 * @param teamId The ncl team id to retrieve the list of images from.
 * @return Returns a Map containing the list of images in two partitions;
 *         empty map if the image service is unreachable or the team has no images.
 */
private Map<String, List<Image>> invokeAndGetImageList(String teamId) {
    log.info("Getting list of saved images for team {}", teamId);
    Map<String, List<Image>> resultMap = new HashMap<>();
    List<Image> createdImageList = new ArrayList<>();
    List<Image> inProgressImageList = new ArrayList<>();

    HttpEntity<String> imageRequest = createHttpEntityHeaderOnly();
    ResponseEntity imageResponse;
    try {
        imageResponse = restTemplate.exchange(properties.getTeamSavedImages(teamId), HttpMethod.GET, imageRequest, String.class);
    } catch (ResourceAccessException e) {
        // best-effort: image list is optional on the teams page
        log.warn("Error connecting to image service: {}", e);
        return new HashMap<>();
    }
    String imageResponseBody = imageResponse.getBody().toString();
    // response shape: { "<teamId>": "{ imageName: status, ... }" }
    String osImageList = new JSONObject(imageResponseBody).getString(teamId);
    JSONObject osImageObject = new JSONObject(osImageList);
    log.debug("osImageList: {}", osImageList);
    log.debug("osImageObject: {}", osImageObject);
    if (osImageObject == JSONObject.NULL || osImageObject.length() == 0) {
        log.info("List of saved images for team {} is empty.", teamId);
        return resultMap;
    }
    for (int k = 0; k < osImageObject.names().length(); k++) {
        String imageName = osImageObject.names().getString(k);
        String imageStatus = osImageObject.getString(imageName);
        log.info("Image list for team {}: image name {}, status {}", teamId, imageName, imageStatus);
        Image image = new Image();
        image.setImageName(imageName);
        image.setDescription("-");
        image.setTeamId(teamId);
        // partition by service-reported status; "notfound" means still being saved
        if ("created".equals(imageStatus)) {
            createdImageList.add(image);
        } else if ("notfound".equals(imageStatus)) {
            inProgressImageList.add(image);
        }
    }
    resultMap.put("created", createdImageList);
    resultMap.put("inProgress", inProgressImageList);
    return resultMap;
}

// @RequestMapping("/accept_participation/{teamId}")
// public String acceptParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) {
//     int currentLoggedInUserId = getSessionIdOfLoggedInUser(session);
//     // get user's participation request list
//     // add this user id to the requested list
//     teamManager.acceptParticipationRequest(currentLoggedInUserId, teamId);
//     // remove participation request since accepted
//     teamManager.removeParticipationRequest(currentLoggedInUserId, teamId);
//
//     // must get team name
//     String teamName = teamManager.getTeamNameByTeamId(teamId);
//     teamManager.setInfoMsg("You have just joined Team " + teamName + " !");
//
//     return "redirect:/teams";
// }

// @RequestMapping("/ignore_participation/{teamId}")
// public String ignoreParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) {
//     // get user's participation request list
//     // remove this user id from the requested list
//     String teamName = teamManager.getTeamNameByTeamId(teamId);
//     teamManager.ignoreParticipationRequest2(getSessionIdOfLoggedInUser(session), teamId);
//     teamManager.setInfoMsg("You have just ignored a team request from Team " + teamName + " !");
//
//     return "redirect:/teams";
// }

/**
 * Withdraws the logged-in user's pending join request for a team.
 * NOTE(review): the @RequestMapping annotation is commented out, so this
 * method is currently not reachable via HTTP — confirm whether intentional.
 */
// @RequestMapping("/withdraw/{teamId}")
public String withdrawnJoinRequest(@PathVariable Integer teamId, HttpSession session) {
    // get user team request
    // remove this user id from the user's request list
    String teamName = teamManager.getTeamNameByTeamId(teamId);
    teamManager.removeUserJoinRequest2(getSessionIdOfLoggedInUser(session), teamId);
    teamManager.setInfoMsg("You have withdrawn your join request for Team " + teamName);
    return "redirect:/teams";
}

// @RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.GET)
// public String inviteMember(@PathVariable Integer teamId, Model model) {
//     model.addAttribute("teamIdVar", teamId);
//     model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm());
//     return "team_page_invite_members";
// }

// @RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.POST)
// public String sendInvitation(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm,Model model) {
//     int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail());
//     teamManager.addInvitedToParticipateMap(userId, teamId);
//     return "redirect:/teams";
// }

/**
 * Shows the members-approval page for a team (local teamManager data).
 */
@RequestMapping(value = "/teams/members_approval/{teamId}", method = RequestMethod.GET)
public String membersApproval(@PathVariable Integer teamId, Model model) {
    model.addAttribute("team", teamManager.getTeamByTeamId(teamId));
    return "team_page_approve_members";
}

/** Accepts a member's join request via the local teamManager. */
@RequestMapping("/teams/members_approval/accept/{teamId}/{userId}")
public String acceptJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.acceptJoinRequest(userId, teamId);
    return "redirect:/teams/members_approval/{teamId}";
}

/** Rejects a member's join request via the local teamManager. */
@RequestMapping("/teams/members_approval/reject/{teamId}/{userId}")
public String rejectJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.rejectJoinRequest(userId, teamId);
    return "redirect:/teams/members_approval/{teamId}";
}

//--------------------------Team Profile Page--------------------------

/**
 * Team profile page: loads team info, its experiments with realizations,
 * and the team quota. Stashes the loaded team and budget in the session so
 * the POST handlers can detect what changed.
 *
 * @throws IOException if the quota error response cannot be parsed
 */
@RequestMapping(value = "/team_profile/{teamId}", method = RequestMethod.GET)
public String teamProfile(@PathVariable String teamId, Model model, final RedirectAttributes redirectAttributes, HttpSession session) throws IOException {
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();

    Team2 team = extractTeamInfo(responseBody);
    model.addAttribute("team", team);
    model.addAttribute("owner", team.getOwner());
    model.addAttribute("membersList", team.getMembersStatusMap().get(MemberStatus.APPROVED));
    // kept for the POST handler to diff against the edited team
    session.setAttribute("originalTeam", team);

    request = createHttpEntityHeaderOnly();
    response = restTemplate.exchange(properties.getExpListByTeamId(teamId), HttpMethod.GET, request, String.class);
    JSONArray experimentsArray = new JSONArray(response.getBody().toString());

    List<Experiment2> experimentList = new ArrayList<>();
    Map<Long, Realization> realizationMap = new HashMap<>();
    for (int k = 0; k < experimentsArray.length(); k++) {
        Experiment2 experiment2 = extractExperiment(experimentsArray.getJSONObject(k).toString());
        Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId());
        realizationMap.put(experiment2.getId(), realization);
        experimentList.add(experiment2);
    }
    model.addAttribute("teamExperimentList", experimentList);
    model.addAttribute("teamRealizationMap", realizationMap);

    //Starting to get quota
    try {
        response = restTemplate.exchange(properties.getQuotaByTeamId(teamId), HttpMethod.GET, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio team service for display team quota: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return REDIRECT_TEAM_PROFILE_TEAM_ID;
    }
    responseBody = response.getBody().toString();

    // handling exceptions from SIO
    if (RestUtil.isError(response.getStatusCode())) {
        MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
        ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
        switch (exceptionState) {
            case TEAM_NOT_FOUND_EXCEPTION:
                log.warn("Get team quota: Team {} not found", teamId);
                return REDIRECT_INDEX_PAGE;
            default:
                log.warn("Get team quota : sio or deterlab adapter connection error");
                redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                break;
        }
    } else {
        log.info("Get team quota info : {}", responseBody);
    }
    TeamQuota teamQuota = extractTeamQuotaInfo(responseBody);
    model.addAttribute("teamQuota", teamQuota);
    session.setAttribute(ORIGINAL_BUDGET, teamQuota.getBudget()); // this is to check if budget changed later
    return "team_profile";
}

/**
 * Edits the team profile (currently only the description is user-editable;
 * website/privacy are sent as fixed values). Flashes "editDesc" success/fail
 * and redirects back to the profile page.
 */
@RequestMapping(value = "/team_profile/{teamId}", method = RequestMethod.POST)
public String editTeamProfile(
        @PathVariable String teamId,
        @ModelAttribute("team") Team2 editTeam,
        final RedirectAttributes redirectAttributes,
        HttpSession session) {
    boolean errorsFound = false;
    if (editTeam.getDescription().isEmpty()) {
        errorsFound = true;
        redirectAttributes.addFlashAttribute("editDesc", "fail");
    }
    if (errorsFound) {
        // safer to remove
        session.removeAttribute("originalTeam");
        return REDIRECT_TEAM_PROFILE + editTeam.getId();
    }

    // can edit team description and team website for now
    JSONObject teamfields = new JSONObject();
    teamfields.put("id", teamId);
    teamfields.put("name", editTeam.getName());
    teamfields.put("description", editTeam.getDescription());
    teamfields.put("website", "http://default.com");
    teamfields.put("organisationType", editTeam.getOrganisationType());
    teamfields.put("privacy", "OPEN");
    teamfields.put("status", editTeam.getStatus());
    teamfields.put("members", editTeam.getMembersList());

    HttpEntity<String> request = createHttpEntityWithBody(teamfields.toString());
    // NOTE(review): PUT response is not checked for errors here — confirm intended.
    ResponseEntity response = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.PUT, request, String.class);

    Team2 originalTeam = (Team2) session.getAttribute("originalTeam");
    if (!originalTeam.getDescription().equals(editTeam.getDescription())) {
        redirectAttributes.addFlashAttribute("editDesc", "success");
    }
    // safer to remove
    session.removeAttribute("originalTeam");
    return REDIRECT_TEAM_PROFILE + teamId;
}

/**
 * Updates the team budget (quota). Validates range locally (non-negative,
 * at most 99999999.99), then PUTs to the sio team service and maps
 * service-side errors to flash attributes.
 *
 * @throws IOException if the error response cannot be parsed
 */
@RequestMapping(value = "/team_quota/{teamId}", method = RequestMethod.POST)
public String editTeamQuota(
        @PathVariable String teamId,
        @ModelAttribute("teamQuota") TeamQuota editTeamQuota,
        final RedirectAttributes redirectAttributes,
        HttpSession session) throws IOException {
    final String QUOTA = "#quota";
    JSONObject teamQuotaJSONObject = new JSONObject();
    teamQuotaJSONObject.put(TEAM_ID, teamId);

    // check if budget is negative or exceeding limit
    if (!editTeamQuota.getBudget().equals("")) {
        if (Double.parseDouble(editTeamQuota.getBudget()) < 0) {
            redirectAttributes.addFlashAttribute(EDIT_BUDGET, "negativeError");
            return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
        } else if (Double.parseDouble(editTeamQuota.getBudget()) > 99999999.99) {
            redirectAttributes.addFlashAttribute(EDIT_BUDGET, "exceedingLimit");
            return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
        }
    }

    teamQuotaJSONObject.put("quota", editTeamQuota.getBudget());
    HttpEntity<String> request = createHttpEntityWithBody(teamQuotaJSONObject.toString());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.getQuotaByTeamId(teamId), HttpMethod.PUT, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio team service for display team quota: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return REDIRECT_TEAM_PROFILE_TEAM_ID;
    }
    String responseBody = response.getBody().toString();

    // handling exceptions from SIO
    if (RestUtil.isError(response.getStatusCode())) {
        MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
        ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
        switch (exceptionState) {
            case TEAM_NOT_FOUND_EXCEPTION:
                log.warn("Get team quota: Team {} not found", teamId);
                return REDIRECT_INDEX_PAGE;
            case TEAM_QUOTA_OUT_OF_RANGE_EXCEPTION:
                // NOTE(review): no flash attribute is set here, so the user gets
                // no visible feedback for this case — confirm intended.
                log.warn("Get team quota: Budget is out of range");
                return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
            case FORBIDDEN_EXCEPTION:
                log.warn("Get team quota: Budget can only be updated by team owner.");
                redirectAttributes.addFlashAttribute(EDIT_BUDGET, "editDeny");
                return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
            default:
                log.warn("Get team quota : sio or deterlab adapter connection error");
                redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
        }
    } else {
        log.info("Edit team quota info : {}", responseBody);
    }

    //check if new budget is different in order to display successful message to user
    String originalBudget = (String) session.getAttribute(ORIGINAL_BUDGET);
    if (!originalBudget.equals(editTeamQuota.getBudget())) {
        redirectAttributes.addFlashAttribute(EDIT_BUDGET, "success");
    }
    // safer to remove
    session.removeAttribute(ORIGINAL_BUDGET);
    return REDIRECT_TEAM_PROFILE + teamId + QUOTA;
}

/**
 * Removes an approved member from a team via the sio team service.
 * Two known failure modes from DeterLab: the member still has experiments
 * (user must terminate them first) or a generic operation failure.
 *
 * @throws IOException if the error response cannot be parsed
 */
@RequestMapping("/remove_member/{teamId}/{userId}")
public String removeMember(@PathVariable String teamId, @PathVariable String userId, final RedirectAttributes redirectAttributes) throws IOException {
    JSONObject teamMemberFields = new JSONObject();
    teamMemberFields.put("userId", userId);
    teamMemberFields.put(MEMBER_TYPE, MemberType.MEMBER.name());
    teamMemberFields.put("memberStatus", MemberStatus.APPROVED.name());
    HttpEntity<String> request = createHttpEntityWithBody(teamMemberFields.toString());
    // NOTE(review): mutating the shared restTemplate's error handler here affects
    // all other handlers using the same bean — confirm thread-safety expectations.
    restTemplate.setErrorHandler(new MyResponseErrorHandler());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.removeUserFromTeam(teamId), HttpMethod.DELETE, request, String.class);
    } catch (RestClientException e) {
        log.warn("Error connecting to sio team service for remove user: {}", e);
        redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
        return REDIRECT_TEAM_PROFILE_TEAM_ID;
    }
    String responseBody = response.getBody().toString();
    User2 user = invokeAndExtractUserInfo(userId);
    String name = user.getFirstName() + " " + user.getLastName();
    if (RestUtil.isError(response.getStatusCode())) {
        MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
        ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
        switch (exceptionState) {
            case DETERLAB_OPERATION_FAILED_EXCEPTION:
                // two subcases when fail to remove users from team
                log.warn("Remove member from team: User {}, Team {} fail - {}", userId, teamId, error.getMessage());
                if ("user has experiments".equals(error.getMessage())) {
                    // case 1 - user has experiments
                    // display the list of experiments that have to be terminated first
                    // since the team profile page has experiments already, we don't have to retrieve them again
                    // use the userid to filter out the experiment list at the web pages
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " Member " + name + " has experiments.");
                    redirectAttributes.addFlashAttribute(REMOVE_MEMBER_UID, userId);
                    redirectAttributes.addFlashAttribute(REMOVE_MEMBER_NAME, name);
                    break;
                } else {
                    // case 2 - deterlab operation failure
                    log.warn("Remove member from team: deterlab operation failed");
                    redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " Member " + name + " cannot be removed.");
                    break;
                }
            default:
                log.warn("Server side error for remove members: {}", error.getError());
                redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                break;
        }
    } else {
        log.info("Remove member: {}", response.getBody().toString());
        // add success message
        redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Member " + name + " has been removed.");
    }
    return REDIRECT_TEAM_PROFILE_TEAM_ID;
}

// @RequestMapping("/team_profile/{teamId}/start_experiment/{expId}")
// public String startExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
//     // start experiment
//     // ensure experiment is stopped first before starting
//     experimentManager.startExperiment(getSessionIdOfLoggedInUser(session), expId);
//     return "redirect:/team_profile/{teamId}";
// }

// @RequestMapping("/team_profile/{teamId}/stop_experiment/{expId}")
// public String stopExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
//     // stop experiment
//     // ensure experiment is in ready mode before stopping
//     experimentManager.stopExperiment(getSessionIdOfLoggedInUser(session), expId);
//     return "redirect:/team_profile/{teamId}";
// }

// @RequestMapping("/team_profile/{teamId}/remove_experiment/{expId}")
// public String removeExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
//     // remove experiment
//     // TODO check userid is indeed the experiment owner or team owner
//     // ensure experiment is stopped first
//     if (experimentManager.removeExperiment(getSessionIdOfLoggedInUser(session), expId) == true) {
//         // decrease exp count to be display on Teams page
//         teamManager.decrementExperimentCount(teamId);
//     }
//     model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
//     return "redirect:/team_profile/{teamId}";
// }

// @RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.GET)
// public String inviteUserFromTeamProfile(@PathVariable Integer teamId, Model model) {
//     model.addAttribute("teamIdVar", teamId);
//     model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm());
//     return "team_profile_invite_members";
// }

// @RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.POST)
// public String sendInvitationFromTeamProfile(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm, Model model) {
//     int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail());
//     teamManager.addInvitedToParticipateMap(userId, teamId);
//     return "redirect:/team_profile/{teamId}";
// }

//--------------------------Apply for New Team Page--------------------------

/** Shows the apply-for-new-team form. */
@RequestMapping(value = "/teams/apply_team", method = RequestMethod.GET)
public String teamPageApplyTeam(Model model) {
    model.addAttribute("teamPageApplyTeamForm", new TeamPageApplyTeamForm());
    return "team_page_apply_team";
}

/**
 * Submits an existing user's application for a new team. Service-side errors
 * are translated through an ExceptionState-to-message map; unknown states map
 * to the generic server-overload message.
 *
 * @throws WebServiceRuntimeException on connection failure or unparseable error body
 */
@RequestMapping(value = "/teams/apply_team", method = RequestMethod.POST)
public String checkApplyTeamInfo(
        @Valid TeamPageApplyTeamForm teamPageApplyTeamForm,
        BindingResult bindingResult,
        HttpSession session,
        final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    // NOTE(review): LOG_PREFIX is used as the SLF4J format string, so the "{}"
    // placeholder is filled by the next argument — intentional but unusual.
    final String LOG_PREFIX = "Existing user apply for new team: {}";
    if (bindingResult.hasErrors()) {
        log.warn(LOG_PREFIX, "Application form error " + teamPageApplyTeamForm.toString());
        return "team_page_apply_team";
    }
    // log data to ensure data has been parsed
    log.debug(LOG_PREFIX, properties.getRegisterRequestToApplyTeam(session.getAttribute("id").toString()));
    log.info(LOG_PREFIX, teamPageApplyTeamForm.toString());

    JSONObject mainObject = new JSONObject();
    JSONObject teamFields = new JSONObject();
    mainObject.put("team", teamFields);
    teamFields.put("name", teamPageApplyTeamForm.getTeamName());
    teamFields.put("description", teamPageApplyTeamForm.getTeamDescription());
    teamFields.put("website", teamPageApplyTeamForm.getTeamWebsite());
    teamFields.put("organisationType", teamPageApplyTeamForm.getTeamOrganizationType());
    teamFields.put("visibility", teamPageApplyTeamForm.getIsPublic());
    String nclUserId = session.getAttribute("id").toString();
    HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString());
    ResponseEntity response;
    try {
        response = restTemplate.exchange(properties.getRegisterRequestToApplyTeam(nclUserId), HttpMethod.POST, request, String.class);
        String responseBody = response.getBody().toString();
        if (RestUtil.isError(response.getStatusCode())) {
            // prepare the exception mapping
            EnumMap<ExceptionState, String> exceptionMessageMap = new EnumMap<>(ExceptionState.class);
            exceptionMessageMap.put(USER_ID_NULL_OR_EMPTY_EXCEPTION, "User id is null or empty ");
            exceptionMessageMap.put(TEAM_NAME_NULL_OR_EMPTY_EXCEPTION, "Team name is null or empty ");
            exceptionMessageMap.put(USER_NOT_FOUND_EXCEPTION, "User not found");
            exceptionMessageMap.put(TEAM_NAME_ALREADY_EXISTS_EXCEPTION, "Team name already exists");
            exceptionMessageMap.put(INVALID_TEAM_NAME_EXCEPTION, "Team name contains invalid characters");
            exceptionMessageMap.put(TEAM_MEMBER_ALREADY_EXISTS_EXCEPTION, "Team member already exists");
            exceptionMessageMap.put(ADAPTER_CONNECTION_EXCEPTION, "Connection to adapter failed");
            exceptionMessageMap.put(ADAPTER_INTERNAL_ERROR_EXCEPTION, "Internal server error on adapter");
            exceptionMessageMap.put(DETERLAB_OPERATION_FAILED_EXCEPTION, "Operation failed on DeterLab");

            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            // known states surface the service's own message; unknown states get the generic one
            final String errorMessage = exceptionMessageMap.containsKey(exceptionState) ? error.getMessage() : ERR_SERVER_OVERLOAD;
            log.warn(LOG_PREFIX, responseBody);
            redirectAttributes.addFlashAttribute("message", errorMessage);
            return "redirect:/teams/apply_team";
        } else {
            // no errors, everything ok
            log.info(LOG_PREFIX, "Application for team " + teamPageApplyTeamForm.getTeamName() + " submitted");
            return "redirect:/teams/team_application_submitted";
        }
    } catch (ResourceAccessException | IOException e) {
        log.error(LOG_PREFIX, e);
        throw new WebServiceRuntimeException(e.getMessage());
    }
}

/** Static acceptable-usage-policy page. */
@RequestMapping(value = "/acceptable_usage_policy", method = RequestMethod.GET)
public String teamOwnerPolicy() {
    return "acceptable_usage_policy";
}

/** Static terms-and-conditions page. */
@RequestMapping(value = "/terms_and_conditions", method = RequestMethod.GET)
public String termsAndConditions() {
    return "terms_and_conditions";
}

//--------------------------Join Team Page--------------------------

/** Shows the join-team form. */
@RequestMapping(value = "/teams/join_team", method = RequestMethod.GET)
public String teamPageJoinTeam(Model model) {
    model.addAttribute("teamPageJoinTeamForm", new TeamPageJoinTeamForm());
    return "team_page_join_team";
}

/**
 * Submits an existing user's request to join a team by name. Mirrors
 * checkApplyTeamInfo's error mapping.
 *
 * @throws WebServiceRuntimeException on connection failure or unparseable error body
 */
@RequestMapping(value = "/teams/join_team", method = RequestMethod.POST)
public String checkJoinTeamInfo(
        @Valid TeamPageJoinTeamForm teamPageJoinForm,
        BindingResult bindingResult,
        Model model,
        HttpSession session,
        final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    final String LOG_PREFIX = "Existing user join team: {}";
    if (bindingResult.hasErrors()) {
        log.warn(LOG_PREFIX, "Application form error " + teamPageJoinForm.toString());
        return "team_page_join_team";
    }

    JSONObject mainObject = new JSONObject();
    JSONObject teamFields = new JSONObject();
    JSONObject userFields = new JSONObject();
    mainObject.put("team", teamFields);
    mainObject.put("user", userFields);
    userFields.put("id", session.getAttribute("id")); // ncl-id
    teamFields.put("name", teamPageJoinForm.getTeamName());

    log.info(LOG_PREFIX, "User " + session.getAttribute("id") + ", team " + teamPageJoinForm.getTeamName());
    HttpEntity<String> request = createHttpEntityWithBody(mainObject.toString());
    ResponseEntity response;
    try {
        restTemplate.setErrorHandler(new MyResponseErrorHandler());
        response = restTemplate.exchange(properties.getJoinRequestExistingUser(), HttpMethod.POST, request, String.class);
        String responseBody = response.getBody().toString();
        if (RestUtil.isError(response.getStatusCode())) {
            // prepare the exception mapping
            EnumMap<ExceptionState, String> exceptionMessageMap = new EnumMap<>(ExceptionState.class);
            exceptionMessageMap.put(USER_NOT_FOUND_EXCEPTION, "User not found");
            exceptionMessageMap.put(USER_ID_NULL_OR_EMPTY_EXCEPTION, "User id is null or empty");
            exceptionMessageMap.put(TEAM_NOT_FOUND_EXCEPTION, "Team name not found");
            exceptionMessageMap.put(TEAM_NAME_NULL_OR_EMPTY_EXCEPTION, "Team name is null or empty");
            exceptionMessageMap.put(USER_ALREADY_IN_TEAM_EXCEPTION, "User already in team");
            exceptionMessageMap.put(TEAM_MEMBER_ALREADY_EXISTS_EXCEPTION, "Team member already exists");
            exceptionMessageMap.put(ADAPTER_CONNECTION_EXCEPTION, "Connection to adapter failed");
            exceptionMessageMap.put(ADAPTER_INTERNAL_ERROR_EXCEPTION, "Internal server error on adapter");
            exceptionMessageMap.put(DETERLAB_OPERATION_FAILED_EXCEPTION, "Operation failed on DeterLab");

            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            final String errorMessage = exceptionMessageMap.containsKey(exceptionState) ? error.getMessage() : ERR_SERVER_OVERLOAD;
            log.warn(LOG_PREFIX, responseBody);
            redirectAttributes.addFlashAttribute("message", errorMessage);
            return "redirect:/teams/join_team";
        } else {
            log.info(LOG_PREFIX, "Application for join team " + teamPageJoinForm.getTeamName() + " submitted");
            return "redirect:/teams/join_application_submitted/" + teamPageJoinForm.getTeamName();
        }
    } catch (ResourceAccessException | IOException e) {
        throw new WebServiceRuntimeException(e.getMessage());
    }
}

//--------------------------Experiment Page--------------------------

/**
 * Experiments overview: resolves the user's DeterLab uid, then collects all
 * experiments (with realizations) of teams where the user's membership is not
 * a pending join request.
 *
 * @throws WebServiceRuntimeException if the deter-uid error body cannot be parsed
 */
@RequestMapping(value = "/experiments", method = RequestMethod.GET)
public String experiments(Model model, HttpSession session) throws WebServiceRuntimeException {
    // long start = System.currentTimeMillis();
    List<Experiment2> experimentList = new ArrayList<>();
    Map<Long, Realization> realizationMap = new HashMap<>();

    HttpEntity<String> request = createHttpEntityHeaderOnly();
    restTemplate.setErrorHandler(new MyResponseErrorHandler());
    ResponseEntity response = restTemplate.exchange(properties.getDeterUid(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();

    try {
        if (RestUtil.isError(response.getStatusCode())) {
            log.error("No user to get experiment: {}", session.getAttribute("id"));
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            log.info("experiment error: {} - {} - {} - user token:{}", error.getError(), error.getMessage(), error.getLocalizedMessage(), httpScopedSession.getAttribute(webProperties.getSessionJwtToken()));
            model.addAttribute(DETER_UID, CONNECTION_ERROR);
        } else {
            log.info("Show the deter user id: {}", responseBody);
            model.addAttribute(DETER_UID, responseBody);
        }
    } catch (IOException e) {
        throw new WebServiceRuntimeException(e.getMessage());
    }

    // get list of teamids
    ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class);
    JSONObject object = new JSONObject(userRespEntity.getBody().toString());
    JSONArray teamIdsJsonArray = object.getJSONArray("teams");

    for (int i = 0; i < teamIdsJsonArray.length(); i++) {
        String teamId = teamIdsJsonArray.get(i).toString();
        HttpEntity<String> teamRequest = createHttpEntityHeaderOnly();
        ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class);
        String teamResponseBody = teamResponse.getBody().toString();
        if (!isMemberJoinRequestPending(session.getAttribute("id").toString(), teamResponseBody)) {
            // get experiments lists of the teams
            HttpEntity<String> expRequest = createHttpEntityHeaderOnly();
            ResponseEntity expRespEntity = restTemplate.exchange(properties.getExpListByTeamId(teamId), HttpMethod.GET, expRequest, String.class);
            JSONArray experimentsArray = new JSONArray(expRespEntity.getBody().toString());
            for (int k = 0; k < experimentsArray.length(); k++) {
                Experiment2 experiment2 = extractExperiment(experimentsArray.getJSONObject(k).toString());
                Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId());
                realizationMap.put(experiment2.getId(), realization);
                experimentList.add(experiment2);
            }
        }
    }
    model.addAttribute("experimentList", experimentList);
    model.addAttribute("realizationMap", realizationMap);
    // System.out.println("Elapsed time to get experiment page:" + (System.currentTimeMillis() - start));
    return EXPERIMENTS;
}

/**
 * Experiment profile page: loads the experiment, its realization, its owner,
 * and the detailed experiment info (ns file, realization details, activity log).
 */
@GetMapping(value = "/experiment_profile/{expId}")
public String experimentProfile(@PathVariable String expId, Model model) {
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getExperiment(expId), HttpMethod.GET, request, String.class);

    log.info("experiment profile: extract experiment");
    Experiment2 experiment2 = extractExperiment(response.getBody().toString());

    log.info("experiment profile: extract realization");
    Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId());

    User2 experimentOwner = invokeAndExtractUserInfo(experiment2.getUserId());

    // get experiment details
    // returns a json string in the format:
    // {
    //   'ns_file' :
    //   {
    //     'msg' : 'success/fail',
    //     'ns_file' : 'ns_file_contents'
    //   },
    //   'realization_details' :
    //   {
    //     'msg' : 'success/fail',
    //     'realization_details' : 'realization_details_contents'
    //   },
    //   'activity_log' :
    //   {
    //     'msg' : 'success/fail',
    //     'activity_log' : 'activity_log_contents'
    //   }
    // }
    // returns a '{}' otherwise if fail
    ResponseEntity expDetailsResponse = restTemplate.exchange(properties.getExperimentDetails(experiment2.getTeamId(), expId), HttpMethod.GET, request, String.class);
    log.debug("experiment profile - experiment details: {}", expDetailsResponse.getBody().toString());

    model.addAttribute("experiment", experiment2);
    model.addAttribute("realization", realization);
    model.addAttribute("experimentOwner", experimentOwner.getFirstName() + ' ' + experimentOwner.getLastName());
    model.addAttribute("experimentDetails", new JSONObject(expDetailsResponse.getBody().toString()));
    return "experiment_profile";
}

/**
 * Shows the create-experiment form, preloaded with the user's teams and the
 * available scenario file names.
 *
 * @throws WebServiceRuntimeException propagated from scenario-file loading
 */
@RequestMapping(value = "/experiments/create", method = RequestMethod.GET)
public String createExperiment(Model model, HttpSession session) throws WebServiceRuntimeException {
    log.info("Loading create experiment page");
    // a list of teams that the logged in user is in
    List<String> scenarioFileNameList = getScenarioFileNameList();
    List<Team2> userTeamsList = new ArrayList<>();

    // get list of teamids
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class);
    String responseBody = response.getBody().toString();
    JSONObject object = new JSONObject(responseBody);
    JSONArray teamIdsJsonArray = object.getJSONArray("teams");

    for (int i = 0; i < teamIdsJsonArray.length(); i++) {
        String teamId = teamIdsJsonArray.get(i).toString();
        HttpEntity<String> teamRequest = createHttpEntityHeaderOnly();
        ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class);
        String teamResponseBody = teamResponse.getBody().toString();
        Team2 team2 = extractTeamInfo(teamResponseBody);
        userTeamsList.add(team2);
    }
    model.addAttribute("scenarioFileNameList", scenarioFileNameList);
    model.addAttribute("experimentForm", new ExperimentForm());
    model.addAttribute("userTeamsList", userTeamsList);
    return "experiment_page_create_experiment";
}

/**
 * Validates and submits a new experiment. Builds the experiment JSON payload
 * (with fixed idleSwap/maxDuration) and POSTs it to the sio experiment service.
 * (Method continues beyond this chunk.)
 *
 * @throws WebServiceRuntimeException if the error response cannot be parsed
 */
@RequestMapping(value = "/experiments/create", method = RequestMethod.POST)
public String validateExperiment(
        @ModelAttribute("experimentForm") ExperimentForm experimentForm,
        HttpSession session,
        BindingResult bindingResult,
        final RedirectAttributes redirectAttributes) throws WebServiceRuntimeException {
    if (bindingResult.hasErrors()) {
        log.info("Create experiment - form has errors");
        return "redirect:/experiments/create";
    }
    if (experimentForm.getName() == null || experimentForm.getName().isEmpty()) {
        redirectAttributes.addFlashAttribute(MESSAGE, "Experiment Name cannot be empty");
        return "redirect:/experiments/create";
    }
    if (experimentForm.getDescription() == null || experimentForm.getDescription().isEmpty()) {
        redirectAttributes.addFlashAttribute(MESSAGE, "Description cannot be empty");
        return "redirect:/experiments/create";
    }

    experimentForm.setScenarioContents(getScenarioContentsFromFile(experimentForm.getScenarioFileName()));

    JSONObject experimentObject = new JSONObject();
    experimentObject.put("userId", session.getAttribute("id").toString());
    experimentObject.put(TEAM_ID, experimentForm.getTeamId());
    experimentObject.put(TEAM_NAME, experimentForm.getTeamName());
    experimentObject.put("name", experimentForm.getName().replaceAll("\\s+", "")); // truncate whitespaces and non-visible characters like \n
    experimentObject.put("description", experimentForm.getDescription());
    experimentObject.put("nsFile", "file");
    experimentObject.put("nsFileContent", experimentForm.getNsFileContent());
    experimentObject.put("idleSwap", "240");
    experimentObject.put("maxDuration", "960");

    log.info("Calling service to create experiment");
    HttpEntity<String> request = createHttpEntityWithBody(experimentObject.toString());
    restTemplate.setErrorHandler(new MyResponseErrorHandler());
    ResponseEntity response = restTemplate.exchange(properties.getSioExpUrl(), HttpMethod.POST, request, String.class);
    String responseBody = response.getBody().toString();

    try {
        if (RestUtil.isError(response.getStatusCode())) {
            MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class);
            ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError());
            switch (exceptionState) {
                case NS_FILE_PARSE_EXCEPTION:
                    log.warn("Ns file error");
                    redirectAttributes.addFlashAttribute(MESSAGE, "There is an error when parsing the NS File.");
                    break;
                case EXPERIMENT_NAME_ALREADY_EXISTS_EXCEPTION:
                    log.warn("Exp name already exists");
                    redirectAttributes.addFlashAttribute(MESSAGE, "Experiment name already exists.");
                    break;
                default:
                    log.warn("Exp service or adapter fail");
                    // possible sio or adapter connection fail
                    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
                    break;
            }
            // NOTE(review): logged on the error path although the experiment was
            // NOT created here — message looks misleading; confirm.
            log.info("Experiment {} created", experimentForm);
            return "redirect:/experiments/create";
        }
    } catch (IOException e) {
        throw new WebServiceRuntimeException(e.getMessage());
    }

    //
    // TODO Uploaded function for network configuration and optional dataset
    // if (!networkFile.isEmpty()) {
    //     try {
    //         String networkFileName = getSessionIdOfLoggedInUser(session) + "-networkconfig-" + networkFile.getOriginalFilename();
    //         BufferedOutputStream stream = new BufferedOutputStream(
    //                 new FileOutputStream(new File(App.EXP_CONFIG_DIR + "/" + networkFileName)));
    //         FileCopyUtils.copy(networkFile.getInputStream(), stream);
    //         stream.close();
    // 
redirectAttributes.addFlashAttribute(MESSAGE, // "You successfully uploaded " + networkFile.getOriginalFilename() + "!"); // // remember network file name here // } // catch (Exception e) { // redirectAttributes.addFlashAttribute(MESSAGE, // "You failed to upload " + networkFile.getOriginalFilename() + " => " + e.getMessage()); // return "redirect:/experiments/create"; // } // } // // if (!dataFile.isEmpty()) { // try { // String dataFileName = getSessionIdOfLoggedInUser(session) + "-data-" + dataFile.getOriginalFilename(); // BufferedOutputStream stream = new BufferedOutputStream( // new FileOutputStream(new File(App.EXP_CONFIG_DIR + "/" + dataFileName))); // FileCopyUtils.copy(dataFile.getInputStream(), stream); // stream.close(); // redirectAttributes.addFlashAttribute("message2", // "You successfully uploaded " + dataFile.getOriginalFilename() + "!"); // // remember data file name here // } // catch (Exception e) { // redirectAttributes.addFlashAttribute("message2", // "You failed to upload " + dataFile.getOriginalFilename() + " => " + e.getMessage()); // } // } // // // add current experiment to experiment manager // experimentManager.addExperiment(getSessionIdOfLoggedInUser(session), experiment); // // increase exp count to be display on Teams page // teamManager.incrementExperimentCount(experiment.getTeamId()); return "redirect:/experiments"; } @RequestMapping(value = "/experiments/save_image/{teamId}/{expId}/{nodeId}", method = RequestMethod.GET) public String saveExperimentImage(@PathVariable String teamId, @PathVariable String expId, @PathVariable String nodeId, Model model) { Map<String, Map<String, String>> singleNodeInfoMap = new HashMap<>(); Image saveImageForm = new Image(); String teamName = invokeAndExtractTeamInfo(teamId).getName(); Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); // experiment may have many nodes // extract just the particular node details to display for (Map.Entry<String, Map<String, 
String>> nodesInfo : realization.getNodesInfoMap().entrySet()) { String nodeName = nodesInfo.getKey(); Map<String, String> singleNodeDetailsMap = nodesInfo.getValue(); if (singleNodeDetailsMap.get(NODE_ID).equals(nodeId)) { singleNodeInfoMap.put(nodeName, singleNodeDetailsMap); // store the current os of the node into the form also // have to pass the the services saveImageForm.setCurrentOS(singleNodeDetailsMap.get("os")); } } saveImageForm.setTeamId(teamId); saveImageForm.setNodeId(nodeId); model.addAttribute("teamName", teamName); model.addAttribute("singleNodeInfoMap", singleNodeInfoMap); model.addAttribute("pathTeamId", teamId); model.addAttribute("pathExperimentId", expId); model.addAttribute("pathNodeId", nodeId); model.addAttribute("experimentName", realization.getExperimentName()); model.addAttribute("saveImageForm", saveImageForm); return "save_experiment_image"; } // bindingResult is required in the method signature to perform the JSR303 validation for Image object @RequestMapping(value = "/experiments/save_image/{teamId}/{expId}/{nodeId}", method = RequestMethod.POST) public String saveExperimentImage( @Valid @ModelAttribute("saveImageForm") Image saveImageForm, BindingResult bindingResult, RedirectAttributes redirectAttributes, @PathVariable String teamId, @PathVariable String expId, @PathVariable String nodeId) throws IOException { if (saveImageForm.getImageName().length() < 2) { log.warn("Save image form has errors {}", saveImageForm); redirectAttributes.addFlashAttribute("message", "Image name too short, minimum 2 characters"); return "redirect:/experiments/save_image/" + teamId + "/" + expId + "/" + nodeId; } log.info("Saving image: team {}, experiment {}, node {}", teamId, expId, nodeId); ObjectMapper mapper = new ObjectMapper(); HttpEntity<String> request = createHttpEntityWithBody(mapper.writeValueAsString(saveImageForm)); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = 
restTemplate.exchange(properties.saveImage(), HttpMethod.POST, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); log.warn("Save image: error with exception {}", exceptionState); switch (exceptionState) { case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Save image: error, operation failed on DeterLab"); redirectAttributes.addFlashAttribute("message", error.getMessage()); break; case ADAPTER_CONNECTION_EXCEPTION: log.warn("Save image: error, cannot connect to adapter"); redirectAttributes.addFlashAttribute("message", "connection to adapter failed"); break; case ADAPTER_INTERNAL_ERROR_EXCEPTION: log.warn("Save image: error, adapter internal server error"); redirectAttributes.addFlashAttribute("message", "internal error was found on the adapter"); break; default: log.warn("Save image: other error"); redirectAttributes.addFlashAttribute("message", ERR_SERVER_OVERLOAD); } return "redirect:/experiments/save_image/" + teamId + "/" + expId + "/" + nodeId; } // everything looks ok log.info("Save image in progress: team {}, experiment {}, node {}, image {}", teamId, expId, nodeId, saveImageForm.getImageName()); return "redirect:/experiments"; } /* private String processSaveImageRequest(@Valid @ModelAttribute("saveImageForm") Image saveImageForm, RedirectAttributes redirectAttributes, @PathVariable String teamId, @PathVariable String expId, @PathVariable String nodeId, ResponseEntity response, String responseBody) throws IOException { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); log.warn("Save image exception: {}", exceptionState); switch (exceptionState) { 
case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("adapter deterlab operation failed exception"); redirectAttributes.addFlashAttribute("message", error.getMessage()); break; default: log.warn("Image service or adapter fail"); // possible sio or adapter connection fail redirectAttributes.addFlashAttribute("message", ERR_SERVER_OVERLOAD); break; } return "redirect:/experiments/save_image/" + teamId + "/" + expId + "/" + nodeId; } else { // everything ok log.info("Image service in progress for Team: {}, Exp: {}, Node: {}, Image: {}", teamId, expId, nodeId, saveImageForm.getImageName()); return "redirect:/experiments"; } } */ // @RequestMapping("/experiments/configuration/{expId}") // public String viewExperimentConfiguration(@PathVariable Integer expId, Model model) { // // get experiment from expid // // retrieve the scenario contents to be displayed // Experiment currExp = experimentManager.getExperimentByExpId(expId); // model.addAttribute("scenarioContents", currExp.getScenarioContents()); // return "experiment_scenario_contents"; // } @RequestMapping("/remove_experiment/{teamName}/{teamId}/{expId}") public String removeExperiment(@PathVariable String teamName, @PathVariable String teamId, @PathVariable String expId, final RedirectAttributes redirectAttributes, HttpSession session) throws WebServiceRuntimeException { // ensure experiment is stopped first Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); Team2 team = invokeAndExtractTeamInfo(teamId); // check valid authentication to remove experiments // either admin, experiment creator or experiment owner if (!validateIfAdmin(session) && !realization.getUserId().equals(session.getAttribute("id").toString()) && !team.getOwner().getId().equals(session.getAttribute(webProperties.getSessionUserId()))) { log.warn("Permission denied when remove Team:{}, Experiment: {} with User: {}, Role:{}", teamId, expId, session.getAttribute("id"), 
session.getAttribute(webProperties.getSessionRoles())); redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to remove experiment;" + permissionDeniedMessage); return "redirect:/experiments"; } if (!realization.getState().equals(RealizationState.NOT_RUNNING.toString())) { log.warn("Trying to remove Team: {}, Experiment: {} with State: {} that is still in progress?", teamId, expId, realization.getState()); redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to remove Exp: " + realization.getExperimentName() + ". Please refresh the page again. If the error persists, please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } log.info("Removing experiment: at " + properties.getDeleteExperiment(teamId, expId)); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response; try { response = restTemplate.exchange(properties.getDeleteExperiment(teamId, expId), HttpMethod.DELETE, request, String.class); } catch (Exception e) { log.warn("Error connecting to experiment service to remove experiment", e.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/experiments"; } String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case EXPERIMENT_DELETE_EXCEPTION: case FORBIDDEN_EXCEPTION: log.warn("remove experiment failed for Team: {}, Exp: {}", teamId, expId); redirectAttributes.addFlashAttribute(MESSAGE, error.getMessage()); break; case OBJECT_OPTIMISTIC_LOCKING_FAILURE_EXCEPTION: // do nothing log.info("remove experiment database locking failure"); break; default: // do nothing break; } return "redirect:/experiments"; } else { // everything ok 
log.info("remove experiment success for Team: {}, Exp: {}", teamId, expId); redirectAttributes.addFlashAttribute("exp_remove_message", "Team: " + teamName + " has removed Exp: " + realization.getExperimentName()); return "redirect:/experiments"; } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } @RequestMapping("/start_experiment/{teamName}/{expId}") public String startExperiment( @PathVariable String teamName, @PathVariable String expId, final RedirectAttributes redirectAttributes, Model model, HttpSession session) throws WebServiceRuntimeException { // ensure experiment is stopped first before starting Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); if (!checkPermissionRealizeExperiment(realization, session)) { log.warn("Permission denied to start experiment: {} for team: {}", realization.getExperimentName(), teamName); redirectAttributes.addFlashAttribute(MESSAGE, permissionDeniedMessage); return "redirect:/experiments"; } String teamId = realization.getTeamId(); String teamStatus = getTeamStatus(teamId); if (!teamStatus.equals(TeamStatus.APPROVED.name())) { log.warn("Error: trying to realize an experiment {} on team {} with status {}", realization.getExperimentName(), teamId, teamStatus); redirectAttributes.addFlashAttribute(MESSAGE, teamName + " is in " + teamStatus + " status and does not have permission to start experiment. Please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } if (!realization.getState().equals(RealizationState.NOT_RUNNING.toString())) { log.warn("Trying to start Team: {}, Experiment: {} with State: {} that is not running?", teamName, expId, realization.getState()); redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to start Exp: " + realization.getExperimentName() + ". Please refresh the page again. 
If the error persists, please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } //start experiment log.info("Starting experiment: at " + properties.getStartExperiment(teamName, expId)); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response; try { response = restTemplate.exchange(properties.getStartExperiment(teamName, expId), HttpMethod.POST, request, String.class); } catch (Exception e) { log.warn("Error connecting to experiment service to start experiment", e.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/experiments"; } String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case EXPERIMENT_START_EXCEPTION: case FORBIDDEN_EXCEPTION: log.warn("start experiment failed for Team: {}, Exp: {}", teamName, expId); redirectAttributes.addFlashAttribute(MESSAGE, error.getMessage()); return "redirect:/experiments"; case TEAM_NOT_FOUND_EXCEPTION: log.warn("Check team quota to start experiment: Team {} not found", teamName); return REDIRECT_INDEX_PAGE; case INSUFFICIENT_QUOTA_EXCEPTION: log.warn("Check team quota to start experiment: Team {} do not have sufficient quota", teamName); redirectAttributes.addFlashAttribute(MESSAGE, "There is insufficient quota for you to start this experiment. 
Please contact your team leader for more details."); return "redirect:/experiments"; case OBJECT_OPTIMISTIC_LOCKING_FAILURE_EXCEPTION: // do nothing log.info("start experiment database locking failure"); break; default: // do nothing break; } log.warn("start experiment some other error occurred exception: {}", exceptionState); // possible for it to be error but experiment has started up finish // if user clicks on start but reloads the page // model.addAttribute(EXPERIMENT_MESSAGE, "Team: " + teamName + " has started Exp: " + realization.getExperimentName()); return EXPERIMENTS; } else { // everything ok log.info("start experiment success for Team: {}, Exp: {}", teamName, expId); redirectAttributes.addFlashAttribute(EXPERIMENT_MESSAGE, "Experiment " + realization.getExperimentName() + " in team " + teamName + " is starting. This may take up to 10 minutes depending on the scale of your experiment. Please refresh this page later."); return "redirect:/experiments"; } } catch (IOException e) { log.warn("start experiment error: {]", e.getMessage()); throw new WebServiceRuntimeException(e.getMessage()); } } @RequestMapping("/stop_experiment/{teamName}/{expId}") public String stopExperiment(@PathVariable String teamName, @PathVariable String expId, Model model, final RedirectAttributes redirectAttributes, HttpSession session) throws WebServiceRuntimeException { // ensure experiment is active first before stopping Realization realization = invokeAndExtractRealization(teamName, Long.parseLong(expId)); if (isNotAdminAndNotInTeam(session, realization)) { log.warn("Permission denied to stop experiment: {} for team: {}", realization.getExperimentName(), teamName); redirectAttributes.addFlashAttribute(MESSAGE, permissionDeniedMessage); return "redirect:/experiments"; } if (!realization.getState().equals(RealizationState.RUNNING.toString())) { log.warn("Trying to stop Team: {}, Experiment: {} with State: {} that is still in progress?", teamName, expId, realization.getState()); 
redirectAttributes.addFlashAttribute(MESSAGE, "An error occurred while trying to stop Exp: " + realization.getExperimentName() + ". Please refresh the page again. If the error persists, please contact " + CONTACT_EMAIL); return "redirect:/experiments"; } log.info("Stopping experiment: at " + properties.getStopExperiment(teamName, expId)); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response; return abc(teamName, expId, redirectAttributes, realization, request); } @RequestMapping("/get_topology/{teamName}/{expId}") @ResponseBody public String getTopology(@PathVariable String teamName, @PathVariable String expId) { try { HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange(properties.getTopology(teamName, expId), HttpMethod.GET, request, String.class); log.info("Retrieve experiment topo success"); return "data:image/png;base64," + response.getBody(); } catch (Exception e) { log.error("Error getting topology thumbnail", e.getMessage()); return ""; } } private String abc(@PathVariable String teamName, @PathVariable String expId, RedirectAttributes redirectAttributes, Realization realization, HttpEntity<String> request) throws WebServiceRuntimeException { ResponseEntity response; try { response = restTemplate.exchange(properties.getStopExperiment(teamName, expId), HttpMethod.POST, request, String.class); } catch (Exception e) { log.warn("Error connecting to experiment service to stop experiment", e.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return "redirect:/experiments"; } String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); if (exceptionState == 
ExceptionState.FORBIDDEN_EXCEPTION) { log.warn("Permission denied to stop experiment: {} for team: {}", realization.getExperimentName(), teamName); redirectAttributes.addFlashAttribute(MESSAGE, permissionDeniedMessage); } if (exceptionState == ExceptionState.OBJECT_OPTIMISTIC_LOCKING_FAILURE_EXCEPTION) { log.info("stop experiment database locking failure"); } } else { // everything ok log.info("stop experiment success for Team: {}, Exp: {}", teamName, expId); redirectAttributes.addFlashAttribute(EXPERIMENT_MESSAGE, "Experiment " + realization.getExperimentName() + " in team " + teamName + " is stopping. Please refresh this page in a few minutes."); } return "redirect:/experiments"; } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } private boolean isNotAdminAndNotInTeam(HttpSession session, Realization realization) { return !validateIfAdmin(session) && !checkPermissionRealizeExperiment(realization, session); } //----------------------------------------------------------------------- //--------------------------Admin Revamp--------------------------------- //----------------------------------------------------------------------- //---------------------------------Admin--------------------------------- @RequestMapping("/admin") public String admin(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } List<Team2> pendingApprovalTeamsList = new ArrayList<>(); //------------------------------------ // get list of teams pending for approval //------------------------------------ HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity = restTemplate.exchange(properties.getSioTeamsUrl(), HttpMethod.GET, request, String.class); JSONArray jsonArray = new JSONArray(responseEntity.getBody().toString()); for (int i = 0; i < jsonArray.length(); i++) { JSONObject jsonObject = jsonArray.getJSONObject(i); Team2 one = extractTeamInfo(jsonObject.toString()); if 
(one.getStatus().equals(TeamStatus.PENDING.name())) { pendingApprovalTeamsList.add(one); } } model.addAttribute("pendingApprovalTeamsList", pendingApprovalTeamsList); return "admin3"; } @RequestMapping("/admin/data") public String adminDataManagement(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //------------------------------------ // get list of datasets //------------------------------------ HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange(properties.getData(), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); List<Dataset> datasetsList = new ArrayList<>(); JSONArray dataJsonArray = new JSONArray(responseBody); for (int i = 0; i < dataJsonArray.length(); i++) { JSONObject dataInfoObject = dataJsonArray.getJSONObject(i); Dataset dataset = extractDataInfo(dataInfoObject.toString()); datasetsList.add(dataset); } ResponseEntity response4 = restTemplate.exchange(properties.getDownloadStat(), HttpMethod.GET, request, String.class); String responseBody4 = response4.getBody().toString(); Map<Integer, Long> dataDownloadStats = new HashMap<>(); JSONArray statJsonArray = new JSONArray(responseBody4); for (int i = 0; i < statJsonArray.length(); i++) { JSONObject statInfoObject = statJsonArray.getJSONObject(i); dataDownloadStats.put(statInfoObject.getInt("dataId"), statInfoObject.getLong("count")); } model.addAttribute("dataList", datasetsList); model.addAttribute("downloadStats", dataDownloadStats); return "data_dashboard"; } @RequestMapping("/admin/data/{datasetId}/resources") public String adminViewDataResources(@PathVariable String datasetId, Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //---------------------------------------- // get list of data resources in a dataset //---------------------------------------- Dataset dataset = 
invokeAndExtractDataInfo(Long.parseLong(datasetId)); model.addAttribute("dataset", dataset); return "admin_data_resources"; } @RequestMapping(value = "/admin/data/{datasetId}/resources/{resourceId}/update", method = RequestMethod.GET) public String adminUpdateResource(@PathVariable String datasetId, @PathVariable String resourceId, Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } Dataset dataset = invokeAndExtractDataInfo(Long.parseLong(datasetId)); DataResource currentDataResource = new DataResource(); for (DataResource dataResource : dataset.getDataResources()) { if (dataResource.getId() == Long.parseLong(resourceId)) { currentDataResource = dataResource; break; } } model.addAttribute("did", dataset.getId()); model.addAttribute("dataresource", currentDataResource); session.setAttribute(ORIGINAL_DATARESOURCE, currentDataResource); return "admin_data_resources_update"; } // updates the malicious status of a data resource @RequestMapping(value = "/admin/data/{datasetId}/resources/{resourceId}/update", method = RequestMethod.POST) public String adminUpdateResourceFormSubmit(@PathVariable String datasetId, @PathVariable String resourceId, @ModelAttribute DataResource dataResource, Model model, HttpSession session, RedirectAttributes redirectAttributes) throws IOException { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } DataResource original = (DataResource) session.getAttribute(ORIGINAL_DATARESOURCE); Dataset dataset = invokeAndExtractDataInfo(Long.parseLong(datasetId)); updateDataset(dataset, dataResource); // add redirect attributes variable to notify what has been modified if (!original.getMaliciousFlag().equalsIgnoreCase(dataResource.getMaliciousFlag())) { redirectAttributes.addFlashAttribute("editMaliciousFlag", "success"); } log.info("Data updated... 
{}", dataset.getName()); model.addAttribute("did", dataset.getId()); model.addAttribute("dataresource", dataResource); session.removeAttribute(ORIGINAL_DATARESOURCE); return "redirect:/admin/data/" + datasetId + "/resources/" + resourceId + "/update"; } private Dataset updateDataset(Dataset dataset, DataResource dataResource) throws IOException { log.info("Data resource updating... {}", dataResource); HttpEntity<String> request = createHttpEntityWithBody(objectMapper.writeValueAsString(dataResource)); ResponseEntity response = restTemplate.exchange(properties.getResource(dataset.getId().toString(), dataResource.getId().toString()), HttpMethod.PUT, request, String.class); Dataset updatedDataset = extractDataInfo(response.getBody().toString()); log.info("Data resource updated... {}", dataResource.getUri()); return updatedDataset; } @RequestMapping("/admin/experiments") public String adminExperimentsManagement(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //------------------------------------ // get list of experiments //------------------------------------ HttpEntity<String> expRequest = createHttpEntityHeaderOnly(); ResponseEntity expResponseEntity = restTemplate.exchange(properties.getSioExpUrl(), HttpMethod.GET, expRequest, String.class); //------------------------------------ // get list of realizations //------------------------------------ HttpEntity<String> realizationRequest = createHttpEntityHeaderOnly(); ResponseEntity realizationResponseEntity = restTemplate.exchange(properties.getAllRealizations(), HttpMethod.GET, realizationRequest, String.class); JSONArray jsonExpArray = new JSONArray(expResponseEntity.getBody().toString()); JSONArray jsonRealizationArray = new JSONArray(realizationResponseEntity.getBody().toString()); Map<Experiment2, Realization> experiment2Map = new HashMap<>(); // exp id, experiment Map<Long, Realization> realizationMap = new HashMap<>(); // exp id, realization for (int k = 0; k < 
jsonRealizationArray.length(); k++) { Realization realization; try { realization = extractRealization(jsonRealizationArray.getJSONObject(k).toString()); } catch (JSONException e) { log.debug("Admin extract realization {}", e); realization = getCleanRealization(); } if (realization.getState().equals(RealizationState.RUNNING.name())) { realizationMap.put(realization.getExperimentId(), realization); } } for (int i = 0; i < jsonExpArray.length(); i++) { Experiment2 experiment2 = extractExperiment(jsonExpArray.getJSONObject(i).toString()); if (realizationMap.containsKey(experiment2.getId())) { experiment2Map.put(experiment2, realizationMap.get(experiment2.getId())); } } model.addAttribute("runningExpMap", experiment2Map); return "experiment_dashboard"; } @RequestMapping("/admin/teams") public String adminTeamsManagement(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //------------------------------------ // get list of teams //------------------------------------ TeamManager2 teamManager2 = new TeamManager2(); HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity = restTemplate.exchange(properties.getSioTeamsUrl(), HttpMethod.GET, request, String.class); JSONArray jsonArray = new JSONArray(responseEntity.getBody().toString()); for (int i = 0; i < jsonArray.length(); i++) { JSONObject jsonObject = jsonArray.getJSONObject(i); Team2 one = extractTeamInfo(jsonObject.toString()); teamManager2.addTeamToTeamMap(one); } model.addAttribute("teamsMap", teamManager2.getTeamMap()); return "team_dashboard"; } @RequestMapping("/admin/users") public String adminUsersManagement(Model model, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //------------------------------------ // get list of users //------------------------------------ Map<String, List<String>> userToTeamMap = new HashMap<>(); // userId : list of team names HttpEntity<String> request = 
createHttpEntityHeaderOnly(); ResponseEntity response2 = restTemplate.exchange(properties.getSioUsersUrl(), HttpMethod.GET, request, String.class); String responseBody2 = response2.getBody().toString(); JSONArray jsonUserArray = new JSONArray(responseBody2); List<User2> usersList = new ArrayList<>(); for (int i = 0; i < jsonUserArray.length(); i++) { JSONObject userObject = jsonUserArray.getJSONObject(i); User2 user = extractUserInfo(userObject.toString()); usersList.add(user); // get list of teams' names for each user List<String> perUserTeamList = new ArrayList<>(); if (userObject.get("teams") != null) { JSONArray teamJsonArray = userObject.getJSONArray("teams"); for (int k = 0; k < teamJsonArray.length(); k++) { Team2 team = invokeAndExtractTeamInfo(teamJsonArray.get(k).toString()); perUserTeamList.add(team.getName()); } userToTeamMap.put(user.getId(), perUserTeamList); } } model.addAttribute("usersList", usersList); model.addAttribute("userToTeamMap", userToTeamMap); return "user_dashboard"; } @RequestMapping("/admin/usage") public String adminTeamUsage(Model model, @RequestParam(value = "team", required = false) String team, @RequestParam(value = "start", required = false) String start, @RequestParam(value = "end", required = false) String end, HttpSession session) { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); ZonedDateTime now = ZonedDateTime.now(); if (start == null) { ZonedDateTime startDate = now.with(firstDayOfMonth()); start = startDate.format(formatter); } if (end == null) { ZonedDateTime endDate = now.with(lastDayOfMonth()); end = endDate.format(formatter); } // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity = restTemplate.exchange(properties.getSioTeamsUrl(), HttpMethod.GET, request, String.class); JSONArray jsonArray = new JSONArray(responseEntity.getBody().toString()); TeamManager2 teamManager2 = 
new TeamManager2(); for (int i = 0; i < jsonArray.length(); i++) { JSONObject jsonObject = jsonArray.getJSONObject(i); Team2 one = extractTeamInfo(jsonObject.toString()); teamManager2.addTeamToTeamMap(one); } if (team != null) { responseEntity = restTemplate.exchange(properties.getUsageStat(team, "startDate=" + start, "endDate=" + end), HttpMethod.GET, request, String.class); String usage = responseEntity.getBody().toString(); model.addAttribute("usage", usage); } model.addAttribute("teamsMap", teamManager2.getTeamMap()); model.addAttribute("start", start); model.addAttribute("end", end); model.addAttribute("team", team); return "usage_statistics"; } @RequestMapping(value = "/admin/energy", method = RequestMethod.GET) public String adminEnergy(Model model, @RequestParam(value = "start", required = false) String start, @RequestParam(value = "end", required = false) String end, final RedirectAttributes redirectAttributes, HttpSession session) throws IOException { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); ZonedDateTime now = ZonedDateTime.now(); if (start == null) { ZonedDateTime startDate = now.with(firstDayOfMonth()); start = startDate.format(formatter); } if (end == null) { ZonedDateTime endDate = now.with(lastDayOfMonth()); end = endDate.format(formatter); } HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity; try { responseEntity = restTemplate.exchange(properties.getEnergyStatistics("startDate=" + start, "endDate=" + end), HttpMethod.GET, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio analytics service for energy usage: {}", e); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return REDIRECT_ENERGY_USAGE; } String responseBody = responseEntity.getBody().toString(); JSONArray jsonArray = new JSONArray(responseBody); // handling exceptions from SIO if 
(RestUtil.isError(responseEntity.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case START_DATE_AFTER_END_DATE_EXCEPTION: log.warn("Get energy usage : Start date after end date error"); redirectAttributes.addFlashAttribute(MESSAGE, ERR_START_DATE_AFTER_END_DATE); return REDIRECT_ENERGY_USAGE; default: log.warn("Get energy usage : sio or deterlab adapter connection error"); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); return REDIRECT_ENERGY_USAGE; } } else { log.info("Get energy usage info : {}", responseBody); } DecimalFormat df2 = new DecimalFormat(".##"); double sumEnergy = 0.00; List<String> listOfDate = new ArrayList<>(); List<Double> listOfEnergy = new ArrayList<>(); ZonedDateTime currentZonedDateTime = convertToZonedDateTime(start); String currentDate = null; for (int i = 0; i < jsonArray.length(); i++) { sumEnergy += jsonArray.getDouble(i); // add into listOfDate to display graph currentDate = currentZonedDateTime.format(formatter); listOfDate.add(currentDate); // add into listOfEnergy to display graph double energy = Double.valueOf(df2.format(jsonArray.getDouble(i))); listOfEnergy.add(energy); currentZonedDateTime = convertToZonedDateTime(currentDate).plusDays(1); } sumEnergy = Double.valueOf(df2.format(sumEnergy)); model.addAttribute("listOfDate", listOfDate); model.addAttribute("listOfEnergy", listOfEnergy); model.addAttribute("start", start); model.addAttribute("end", end); model.addAttribute("energy", sumEnergy); return "energy_usage"; } /** * Get simple ZonedDateTime from date string in the format 'YYYY-MM-DD'. 
* @param date date string to convert * @return ZonedDateTime of */ private ZonedDateTime convertToZonedDateTime(String date) { String[] result = date.split("-"); return ZonedDateTime.of( Integer.parseInt(result[0]), Integer.parseInt(result[1]), Integer.parseInt(result[2]), 0, 0, 0, 0, ZoneId.of("Asia/Singapore")); } // @RequestMapping(value="/admin/domains/add", method=RequestMethod.POST) // public String addDomain(@Valid Domain domain, BindingResult bindingResult) { // if (bindingResult.hasErrors()) { // return "redirect:/admin"; // } else { // domainManager.addDomains(domain.getDomainName()); // } // return "redirect:/admin"; // } // @RequestMapping("/admin/domains/remove/{domainKey}") // public String removeDomain(@PathVariable String domainKey) { // domainManager.removeDomains(domainKey); // return "redirect:/admin"; // } @RequestMapping("/admin/teams/accept/{teamId}/{teamOwnerId}") public String approveTeam( @PathVariable String teamId, @PathVariable String teamOwnerId, final RedirectAttributes redirectAttributes, HttpSession session ) throws WebServiceRuntimeException { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //FIXME require approver info log.info("Approving new team {}, team owner {}", teamId, teamOwnerId); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange( properties.getApproveTeam(teamId, teamOwnerId, TeamStatus.APPROVED), HttpMethod.POST, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error; try { error = objectMapper.readValue(responseBody, MyErrorResource.class); } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case TEAM_ID_NULL_OR_EMPTY_EXCEPTION: log.warn("Approve team: TeamId 
cannot be null or empty: {}", teamId); redirectAttributes.addFlashAttribute(MESSAGE, "TeamId cannot be null or empty"); break; case USER_ID_NULL_OR_EMPTY_EXCEPTION: log.warn("Approve team: UserId cannot be null or empty: {}", teamOwnerId); redirectAttributes.addFlashAttribute(MESSAGE, "UserId cannot be null or empty"); break; case EMAIL_NOT_VERIFIED_EXCEPTION: log.warn("Approve team: User {} email not verified", teamOwnerId); redirectAttributes.addFlashAttribute(MESSAGE, "User email has not been verified"); break; case INVALID_TEAM_STATUS_EXCEPTION: log.warn("Approve team: TeamStatus is invalid"); redirectAttributes.addFlashAttribute(MESSAGE, "Team status is invalid"); break; case TEAM_NOT_FOUND_EXCEPTION: log.warn("Approve team: Team {} not found", teamId); redirectAttributes.addFlashAttribute(MESSAGE, "Team does not exist"); break; case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Approve team: Team {} fail", teamId); redirectAttributes.addFlashAttribute(MESSAGE, "Approve team request fail on Deterlab"); break; default: log.warn("Approve team : sio or deterlab adapter connection error"); // possible sio or adapter connection fail redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); break; } return "redirect:/admin"; } // http status code is OK, then need to check the response message String msg = new JSONObject(responseBody).getString("msg"); if ("approve project OK".equals(msg)) { log.info("Approve team {} OK", teamId); } else { log.warn("Approve team {} FAIL", teamId); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); } return "redirect:/admin"; } @RequestMapping("/admin/teams/reject/{teamId}/{teamOwnerId}") public String rejectTeam( @PathVariable String teamId, @PathVariable String teamOwnerId, @RequestParam("reason") String reason, final RedirectAttributes redirectAttributes, HttpSession session ) throws WebServiceRuntimeException { if (!validateIfAdmin(session)) { return NO_PERMISSION_PAGE; } //FIXME require approver info 
log.info("Rejecting new team {}, team owner {}, reason {}", teamId, teamOwnerId, reason); HttpEntity<String> request = createHttpEntityWithBody(reason); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange( properties.getApproveTeam(teamId, teamOwnerId, TeamStatus.REJECTED), HttpMethod.POST, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error; try { error = objectMapper.readValue(responseBody, MyErrorResource.class); } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case TEAM_ID_NULL_OR_EMPTY_EXCEPTION: log.warn("Reject team: TeamId cannot be null or empty: {}", teamId); redirectAttributes.addFlashAttribute(MESSAGE, "TeamId cannot be null or empty"); break; case USER_ID_NULL_OR_EMPTY_EXCEPTION: log.warn("Reject team: UserId cannot be null or empty: {}", teamOwnerId); redirectAttributes.addFlashAttribute(MESSAGE, "UserId cannot be null or empty"); break; case INVALID_TEAM_STATUS_EXCEPTION: log.warn("Reject team: TeamStatus is invalid"); redirectAttributes.addFlashAttribute(MESSAGE, "Team status is invalid"); break; case TEAM_NOT_FOUND_EXCEPTION: log.warn("Reject team: Team {} not found", teamId); redirectAttributes.addFlashAttribute(MESSAGE, "Team does not exist"); break; case DETERLAB_OPERATION_FAILED_EXCEPTION: log.warn("Reject team: Team {} fail", teamId); redirectAttributes.addFlashAttribute(MESSAGE, "Reject team request fail on Deterlab"); break; default: log.warn("Reject team : sio or deterlab adapter connection error"); // possible sio or adapter connection fail redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); break; } return "redirect:/admin"; } // http status code is OK, then need to check the response message String msg = new 
JSONObject(responseBody).getString("msg");
// Continuation of rejectTeam(): HTTP status was OK, so inspect the payload's
// "msg" field to confirm the Deterlab-side rejection actually succeeded.
if ("reject project OK".equals(msg)) {
    log.info("Reject team {} OK", teamId);
} else {
    log.warn("Reject team {} FAIL", teamId);
    redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD);
}
return "redirect:/admin";
}

/**
 * Admin action toggling a team's restriction state.
 * "restrict" moves an APPROVED team to RESTRICTED; "free" moves a RESTRICTED
 * team back to APPROVED. Any other action/status combination is refused with
 * a flash message and a redirect back to the teams dashboard.
 */
@RequestMapping("/admin/teams/{teamId}")
public String setupTeamRestriction(
        @PathVariable final String teamId,
        @RequestParam(value = "action", required = true) final String action,
        final RedirectAttributes redirectAttributes,
        HttpSession session) throws IOException {
    final String LOG_MESSAGE = "Updating restriction settings for team {}: {}";

    // check if admin
    if (!validateIfAdmin(session)) {
        log.warn(LOG_MESSAGE, teamId, PERMISSION_DENIED);
        return NO_PERMISSION_PAGE;
    }

    Team2 team = invokeAndExtractTeamInfo(teamId);

    // check if team is approved before restricted
    if ("restrict".equals(action) && team.getStatus().equals(TeamStatus.APPROVED.name())) {
        return restrictTeam(team, redirectAttributes);
    }
    // check if team is restricted before freeing it back to approved
    else if ("free".equals(action) && team.getStatus().equals(TeamStatus.RESTRICTED.name())) {
        return freeTeam(team, redirectAttributes);
    } else {
        log.warn(LOG_MESSAGE, teamId, "Cannot " + action + " team with status " + team.getStatus());
        redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + "Cannot " + action + " team " + team.getName() + " with status " + team.getStatus());
        return "redirect:/admin/teams";
    }
}

/**
 * Asks the SIO teams service to set the given team's status to RESTRICTED.
 */
private String restrictTeam(final Team2 team, RedirectAttributes redirectAttributes) throws IOException {
    log.info("Restricting team {}", team.getId());
    HttpEntity<String> request = createHttpEntityHeaderOnly();
    ResponseEntity response = restTemplate.exchange(
            properties.getSioTeamsStatusUrl(team.getId(), TeamStatus.RESTRICTED),
            HttpMethod.PUT, request, String.class);
    String responseBody = response.getBody().toString();
    if (RestUtil.isError(response.getStatusCode())) {
        MyErrorResource error = objectMapper.readValue(responseBody,
MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); String logMessage = "Failed to restrict team {}: {}"; switch (exceptionState) { case TEAM_NOT_FOUND_EXCEPTION: log.warn(logMessage, team.getId(), TEAM_NOT_FOUND); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + TEAM_NOT_FOUND); break; case INVALID_STATUS_TRANSITION_EXCEPTION: log.warn(logMessage, team.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage()); break; case INVALID_TEAM_STATUS_EXCEPTION: log.warn(logMessage, team.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage()); break; case FORBIDDEN_EXCEPTION: log.warn(logMessage, team.getId(), PERMISSION_DENIED); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + PERMISSION_DENIED); break; default: log.warn(logMessage, team.getId(), exceptionState.getExceptionName()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); } return "redirect:/admin"; } else { // good log.info("Team {} has been restricted", team.getId()); redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Team " + team.getName() + " status has been changed to " + TeamStatus.RESTRICTED.name()); return "redirect:/admin"; } } private String freeTeam(final Team2 team, RedirectAttributes redirectAttributes) throws IOException { log.info("Freeing team {}", team.getId()); HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange( properties.getSioTeamsStatusUrl(team.getId(), TeamStatus.APPROVED), HttpMethod.PUT, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); String logMessage = "Failed to free team {}: 
{}"; switch (exceptionState) { case TEAM_NOT_FOUND_EXCEPTION: log.warn(logMessage, team.getId(), TEAM_NOT_FOUND); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + TEAM_NOT_FOUND); break; case INVALID_STATUS_TRANSITION_EXCEPTION: log.warn(logMessage, team.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage()); break; case INVALID_TEAM_STATUS_EXCEPTION: log.warn(logMessage, team.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage()); break; case FORBIDDEN_EXCEPTION: log.warn(logMessage, team.getId(), PERMISSION_DENIED); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + PERMISSION_DENIED); break; default: log.warn(logMessage, team.getId(), exceptionState.getExceptionName()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); } return "redirect:/admin"; } else { // good log.info("Team {} has been freed", team.getId()); redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "Team " + team.getName() + " status has been changed to " + TeamStatus.APPROVED.name()); return "redirect:/admin"; } } @RequestMapping("/admin/users/{userId}") public String freezeUnfreezeUsers( @PathVariable final String userId, @RequestParam(value = "action", required = true) final String action, final RedirectAttributes redirectAttributes, HttpSession session) throws IOException { User2 user = invokeAndExtractUserInfo(userId); // check if admin if (!validateIfAdmin(session)) { log.warn("Access denied when trying to freeze/unfreeze user {}: must be admin!", userId); return NO_PERMISSION_PAGE; } // check if user status is approved before freeze if ("freeze".equals(action) && user.getStatus().equals(UserStatus.APPROVED.toString())) { return freezeUser(user, redirectAttributes); } // check if user status is frozen before unfreeze else if ("unfreeze".equals(action) && user.getStatus().equals(UserStatus.FROZEN.toString())) { return unfreezeUser(user, 
redirectAttributes); } else { log.warn("Error in freeze/unfreeze user {}: failed to {} user with status {}", userId, action, user.getStatus()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + "failed to " + action + " user " + user.getEmail() + " with status " + user.getStatus()); return "redirect:/admin/users"; } } private String freezeUser(final User2 user, RedirectAttributes redirectAttributes) throws IOException { log.info("Freezing user {}, email {}", user.getId(), user.getEmail()); HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange( properties.getSioUsersStatusUrl(user.getId(), UserStatus.FROZEN.toString()), HttpMethod.PUT, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case USER_NOT_FOUND_EXCEPTION: log.warn("Failed to freeze user {}: user not found", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + NOT_FOUND); break; case INVALID_STATUS_TRANSITION_EXCEPTION: log.warn("Failed to freeze user {}: invalid status transition {}", user.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not allowed."); break; case INVALID_USER_STATUS_EXCEPTION: log.warn("Failed to freeze user {}: invalid user status {}", user.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not a valid status."); break; case FORBIDDEN_EXCEPTION: log.warn("Failed to freeze user {}: must be an Admin", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " permission denied."); break; default: log.warn("Failed to freeze user {}: {}", user.getId(), 
exceptionState.getExceptionName()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); break; } return "redirect:/admin"; } else { // good log.info("User {} has been frozen", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "User " + user.getEmail() + " has been banned."); return "redirect:/admin"; } } private String unfreezeUser(final User2 user, RedirectAttributes redirectAttributes) throws IOException { log.info("Unfreezing user {}, email {}", user.getId(), user.getEmail()); HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange( properties.getSioUsersStatusUrl(user.getId(), UserStatus.APPROVED.toString()), HttpMethod.PUT, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case USER_NOT_FOUND_EXCEPTION: log.warn("Failed to unfreeze user {}: user not found", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + NOT_FOUND); break; case INVALID_STATUS_TRANSITION_EXCEPTION: log.warn("Failed to unfreeze user {}: invalid status transition {}", user.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not allowed."); break; case INVALID_USER_STATUS_EXCEPTION: log.warn("Failed to unfreeze user {}: invalid user status {}", user.getId(), error.getMessage()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + error.getMessage() + " is not a valid status."); break; case FORBIDDEN_EXCEPTION: log.warn("Failed to unfreeze user {}: must be an Admin", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " permission denied."); break; default: log.warn("Failed to unfreeze user 
{}: {}", user.getId(), exceptionState.getExceptionName()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); break; } return "redirect:/admin"; } else { // good log.info("User {} has been unfrozen", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "User " + user.getEmail() + " has been unbanned."); return "redirect:/admin"; } } @RequestMapping("/admin/users/{userId}/remove") public String removeUser(@PathVariable final String userId, final RedirectAttributes redirectAttributes, HttpSession session) throws IOException { // check if admin if (!validateIfAdmin(session)) { log.warn("Access denied when trying to remove user {}: must be admin!", userId); return NO_PERMISSION_PAGE; } User2 user = invokeAndExtractUserInfo(userId); HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getUser(user.getId()), HttpMethod.DELETE, request, String.class); String responseBody = response.getBody().toString(); if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case USER_NOT_FOUND_EXCEPTION: log.warn("Failed to remove user {}: user not found", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + NOT_FOUND); break; case USER_IS_NOT_DELETABLE_EXCEPTION: log.warn("Failed to remove user {}: user is not deletable", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + " is not deletable."); break; case CREDENTIALS_NOT_FOUND_EXCEPTION: log.warn("Failed to remove user {}: unable to find credentials", user.getId()); redirectAttributes.addFlashAttribute(MESSAGE, ERROR_PREFIX + " user " + user.getEmail() + " is not found."); break; default: 
log.warn("Failed to remove user {}: {}", user.getId(), exceptionState.getExceptionName()); redirectAttributes.addFlashAttribute(MESSAGE, ERR_SERVER_OVERLOAD); break; } } else { log.info("User {} has been removed", userId); redirectAttributes.addFlashAttribute(MESSAGE_SUCCESS, "User " + user.getEmail() + " has been removed."); } return "redirect:/admin/users"; } // @RequestMapping("/admin/experiments/remove/{expId}") // public String adminRemoveExp(@PathVariable Integer expId) { // int teamId = experimentManager.getExperimentByExpId(expId).getTeamId(); // experimentManager.adminRemoveExperiment(expId); // // // decrease exp count to be display on Teams page // teamManager.decrementExperimentCount(teamId); // return "redirect:/admin"; // } // @RequestMapping(value="/admin/data/contribute", method=RequestMethod.GET) // public String adminContributeDataset(Model model) { // model.addAttribute("dataset", new Dataset()); // // File rootFolder = new File(App.ROOT); // List<String> fileNames = Arrays.stream(rootFolder.listFiles()) // .map(f -> f.getError()) // .collect(Collectors.toList()); // // model.addAttribute("files", // Arrays.stream(rootFolder.listFiles()) // .sorted(Comparator.comparingLong(f -> -1 * f.lastModified())) // .map(f -> f.getError()) // .collect(Collectors.toList()) // ); // // return "admin_contribute_data"; // } // @RequestMapping(value="/admin/data/contribute", method=RequestMethod.POST) // public String validateAdminContributeDataset(@ModelAttribute("dataset") Dataset dataset, HttpSession session, @RequestParam("file") MultipartFile file, RedirectAttributes redirectAttributes) throws IOException { // BufferedOutputStream stream = null; // FileOutputStream fileOutputStream = null; // // TODO // // validation // // get file from user upload to server // if (!file.isEmpty()) { // try { // String fileName = getSessionIdOfLoggedInUser(session) + "-" + file.getOriginalFilename(); // fileOutputStream = new FileOutputStream(new File(App.ROOT + "/" + 
fileName)); // stream = new BufferedOutputStream(fileOutputStream); // FileCopyUtils.copy(file.getInputStream(), stream); // redirectAttributes.addFlashAttribute(MESSAGE, // "You successfully uploaded " + file.getOriginalFilename() + "!"); // datasetManager.addDataset(getSessionIdOfLoggedInUser(session), dataset, file.getOriginalFilename()); // } // catch (Exception e) { // redirectAttributes.addFlashAttribute(MESSAGE, // "You failed to upload " + file.getOriginalFilename() + " => " + e.getMessage()); // } finally { // if (stream != null) { // stream.close(); // } // if (fileOutputStream != null) { // fileOutputStream.close(); // } // } // } // else { // redirectAttributes.addFlashAttribute(MESSAGE, // "You failed to upload " + file.getOriginalFilename() + " because the file was empty"); // } // return "redirect:/admin"; // } // @RequestMapping("/admin/data/remove/{datasetId}") // public String adminRemoveDataset(@PathVariable Integer datasetId) { // datasetManager.removeDataset(datasetId); // return "redirect:/admin"; // } // @RequestMapping(value="/admin/node/add", method=RequestMethod.GET) // public String adminAddNode(Model model) { // model.addAttribute("node", new Node()); // return "admin_add_node"; // } // @RequestMapping(value="/admin/node/add", method=RequestMethod.POST) // public String adminAddNode(@ModelAttribute("node") Node node) { // // TODO // // validate fields, eg should be integer // nodeManager.addNode(node); // return "redirect:/admin"; // } //--------------------------Static pages for teams-------------------------- @RequestMapping("/teams/team_application_submitted") public String teamAppSubmitFromTeamsPage() { return "team_page_application_submitted"; } @RequestMapping("/teams/join_application_submitted/{teamName}") public String teamAppJoinFromTeamsPage(@PathVariable String teamName, Model model) throws WebServiceRuntimeException { log.info("Redirecting to join application submitted page"); HttpEntity<String> request = 
createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new MyResponseErrorHandler()); ResponseEntity response = restTemplate.exchange(properties.getTeamByName(teamName), HttpMethod.GET, request, String.class); String responseBody = response.getBody().toString(); try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); ExceptionState exceptionState = ExceptionState.parseExceptionState(error.getError()); switch (exceptionState) { case TEAM_NOT_FOUND_EXCEPTION: log.warn("submitted join team request : team name error"); break; default: log.warn("submitted join team request : some other failure"); // possible sio or adapter connection fail break; } return "redirect:/teams/join_team"; } } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } Team2 one = extractTeamInfo(responseBody); model.addAttribute("team", one); return "team_page_join_application_submitted"; } //--------------------------Static pages for sign up-------------------------- @RequestMapping("/team_application_submitted") public String teamAppSubmit() { return "team_application_submitted"; } /** * A page to show new users has successfully registered to apply to join an existing team * The page contains the team owner information which the users requested to join * * @param model The model which is passed from signup * @return A success page otherwise an error page if the user tries to access this page directly */ @RequestMapping("/join_application_submitted") public String joinTeamAppSubmit(Model model) { // model attribute should be passed from /signup2 // team is required to display the team owner details if (model.containsAttribute("team")) { return "join_team_application_submitted"; } return "error"; } @RequestMapping("/email_not_validated") public String emailNotValidated() { return "email_not_validated"; } @RequestMapping("/team_application_under_review") public String 
teamAppUnderReview() { return "team_application_under_review"; } // model attribute name come from /login @RequestMapping("/email_checklist") public String emailChecklist(@ModelAttribute("statuschecklist") String status) { return "email_checklist"; } @RequestMapping("/join_application_awaiting_approval") public String joinTeamAppAwaitingApproval(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "join_team_application_awaiting_approval"; } //--------------------------Get List of scenarios filenames-------------------------- private List<String> getScenarioFileNameList() throws WebServiceRuntimeException { log.info("Retrieving scenario file names"); // List<String> scenarioFileNameList = null; // try { // scenarioFileNameList = IOUtils.readLines(getClass().getClassLoader().getResourceAsStream("scenarios"), StandardCharsets.UTF_8); // } catch (IOException e) { // throw new WebServiceRuntimeException(e.getMessage()); // } // File folder = null; // try { // folder = new ClassPathResource("scenarios").getFile(); // } catch (IOException e) { // throw new WebServiceRuntimeException(e.getMessage()); // } // List<String> scenarioFileNameList = new ArrayList<>(); // File[] files = folder.listFiles(); // for (File file : files) { // if (file.isFile()) { // scenarioFileNameList.add(file.getError()); // } // } // FIXME: hardcode list of filenames for now List<String> scenarioFileNameList = new ArrayList<>(); scenarioFileNameList.add("Scenario 1 - Experiment with a single node"); scenarioFileNameList.add("Scenario 2 - Experiment with 2 nodes and 10Gb link"); scenarioFileNameList.add("Scenario 3 - Experiment with 3 nodes in a LAN"); scenarioFileNameList.add("Scenario 4 - Experiment with 2 nodes and customized link property"); scenarioFileNameList.add("Scenario 5 - Single SDN switch connected to two nodes"); scenarioFileNameList.add("Scenario 6 - Tree Topology with configurable SDN switches"); // 
scenarioFileNameList.add("Scenario 4 - Two nodes linked with a 10Gbps SDN switch"); // scenarioFileNameList.add("Scenario 5 - Three nodes with Blockchain capabilities"); log.info("Scenario file list: {}", scenarioFileNameList); return scenarioFileNameList; } private String getScenarioContentsFromFile(String scenarioFileName) throws WebServiceRuntimeException { // FIXME: switch to better way of referencing scenario descriptions to actual filenames String actualScenarioFileName; if (scenarioFileName.contains("Scenario 1")) { actualScenarioFileName = "basic1.ns"; } else if (scenarioFileName.contains("Scenario 2")) { actualScenarioFileName = "basic2.ns"; } else if (scenarioFileName.contains("Scenario 3")) { actualScenarioFileName = "basic3.ns"; } else if (scenarioFileName.contains("Scenario 4")) { actualScenarioFileName = "basic4.ns"; } else if (scenarioFileName.contains("Scenario 5")) { actualScenarioFileName = "basic5.ns"; } else if (scenarioFileName.contains("Scenario 6")) { actualScenarioFileName = "basic6.ns"; } else { // defaults to basic single node actualScenarioFileName = "basic1.ns"; } try { log.info("Retrieving scenario files {}", getClass().getClassLoader().getResourceAsStream("scenarios/" + actualScenarioFileName)); List<String> lines = IOUtils.readLines(getClass().getClassLoader().getResourceAsStream("scenarios/" + actualScenarioFileName), StandardCharsets.UTF_8); StringBuilder sb = new StringBuilder(); for (String line : lines) { sb.append(line); sb.append(System.getProperty("line.separator")); } log.info("Experiment ns file contents: {}", sb); return sb.toString(); } catch (IOException e) { throw new WebServiceRuntimeException(e.getMessage()); } } //---Check if user is a team owner and has any join request waiting for approval---- private boolean hasAnyJoinRequest(HashMap<Integer, Team> teamMapOwnedByUser) { for (Map.Entry<Integer, Team> entry : teamMapOwnedByUser.entrySet()) { Team currTeam = entry.getValue(); if (currTeam.isUserJoinRequestEmpty() == 
false) { // at least one team has join user request return true; } } // loop through all teams but never return a single true // therefore, user's controlled teams has no join request return false; } //--------------------------MISC-------------------------- private int getSessionIdOfLoggedInUser(HttpSession session) { return Integer.parseInt(session.getAttribute(SESSION_LOGGED_IN_USER_ID).toString()); } private User2 extractUserInfo(String userJson) { User2 user2 = new User2(); if (userJson == null) { // return empty user return user2; } JSONObject object = new JSONObject(userJson); JSONObject userDetails = object.getJSONObject("userDetails"); JSONObject address = userDetails.getJSONObject("address"); user2.setId(object.getString("id")); user2.setFirstName(getJSONStr(userDetails.getString("firstName"))); user2.setLastName(getJSONStr(userDetails.getString("lastName"))); user2.setJobTitle(userDetails.getString("jobTitle")); user2.setEmail(userDetails.getString("email")); user2.setPhone(userDetails.getString("phone")); user2.setAddress1(address.getString("address1")); user2.setAddress2(address.getString("address2")); user2.setCountry(address.getString("country")); user2.setRegion(address.getString("region")); user2.setPostalCode(address.getString("zipCode")); user2.setCity(address.getString("city")); user2.setInstitution(userDetails.getString("institution")); user2.setInstitutionAbbreviation(userDetails.getString("institutionAbbreviation")); user2.setInstitutionWeb(userDetails.getString("institutionWeb")); user2.setStatus(object.getString("status")); user2.setEmailVerified(object.getBoolean("emailVerified")); // applicationDate is ZonedDateTime try { user2.setApplicationDate(object.get(APPLICATION_DATE).toString()); } catch (Exception e) { // since applicationDate date is a ZonedDateTime and not String // set to '?' 
at the html page log.warn("Error getting user application date {}", e); } return user2; } private Team2 extractTeamInfo(String json) { Team2 team2 = new Team2(); JSONObject object = new JSONObject(json); JSONArray membersArray = object.getJSONArray("members"); // createdDate is ZonedDateTime // processedDate is ZonedDateTime try { team2.setApplicationDate(object.get(APPLICATION_DATE).toString()); team2.setProcessedDate(object.get("processedDate").toString()); } catch (Exception e) { log.warn("Error getting team application date and/or processedDate {}", e); // created date is a ZonedDateTime // since created date and proccessed date is a ZonedDateTime and not String // both is set to '?' at the html page if exception } team2.setId(object.getString("id")); team2.setName(object.getString("name")); team2.setDescription(object.getString("description")); team2.setWebsite(object.getString("website")); team2.setOrganisationType(object.getString("organisationType")); team2.setStatus(object.getString("status")); team2.setVisibility(object.getString("visibility")); for (int i = 0; i < membersArray.length(); i++) { JSONObject memberObject = membersArray.getJSONObject(i); String userId = memberObject.getString("userId"); String teamMemberType = memberObject.getString(MEMBER_TYPE); String teamMemberStatus = memberObject.getString("memberStatus"); User2 myUser = invokeAndExtractUserInfo(userId); if (teamMemberType.equals(MemberType.MEMBER.name())) { // add to pending members list for Members Awaiting Approval function if (teamMemberStatus.equals(MemberStatus.PENDING.name())) { team2.addPendingMembers(myUser); } } else if (teamMemberType.equals(MemberType.OWNER.name())) { // explicit safer check team2.setOwner(myUser); } team2.addMembersToStatusMap(MemberStatus.valueOf(teamMemberStatus), myUser); } team2.setMembersCount(team2.getMembersStatusMap().get(MemberStatus.APPROVED).size()); return team2; } // use to extract JSON Strings from services // in the case where the JSON Strings 
are null, return "Connection Error" private String getJSONStr(String jsonString) { if (jsonString == null || jsonString.isEmpty()) { return CONNECTION_ERROR; } return jsonString; } /** * Checks if user is pending for join request approval from team leader * Use for fixing bug for view experiment page where users previously can view the experiments just by issuing a join request * * @param json the response body after calling team service * @param loginUserId the current logged in user id * @return True if the user is anything but APPROVED, false otherwise */ private boolean isMemberJoinRequestPending(String loginUserId, String json) { if (json == null) { return true; } JSONObject object = new JSONObject(json); JSONArray membersArray = object.getJSONArray("members"); for (int i = 0; i < membersArray.length(); i++) { JSONObject memberObject = membersArray.getJSONObject(i); String userId = memberObject.getString("userId"); String teamMemberStatus = memberObject.getString("memberStatus"); if (userId.equals(loginUserId) && !teamMemberStatus.equals(MemberStatus.APPROVED.toString())) { return true; } } log.info("User: {} is viewing experiment page", loginUserId); return false; } private Team2 extractTeamInfoUserJoinRequest(String userId, String json) { Team2 team2 = new Team2(); JSONObject object = new JSONObject(json); JSONArray membersArray = object.getJSONArray("members"); for (int i = 0; i < membersArray.length(); i++) { JSONObject memberObject = membersArray.getJSONObject(i); String uid = memberObject.getString("userId"); String teamMemberStatus = memberObject.getString("memberStatus"); if (uid.equals(userId) && teamMemberStatus.equals(MemberStatus.PENDING.toString())) { team2.setId(object.getString("id")); team2.setName(object.getString("name")); team2.setDescription(object.getString("description")); team2.setWebsite(object.getString("website")); team2.setOrganisationType(object.getString("organisationType")); team2.setStatus(object.getString("status")); 
team2.setVisibility(object.getString("visibility")); team2.setMembersCount(membersArray.length()); return team2; } } // no such member in the team found return null; } protected Dataset invokeAndExtractDataInfo(Long dataId) { HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response = restTemplate.exchange(properties.getDataset(dataId.toString()), HttpMethod.GET, request, String.class); return extractDataInfo(response.getBody().toString()); } protected Dataset extractDataInfo(String json) { log.debug(json); JSONObject object = new JSONObject(json); Dataset dataset = new Dataset(); dataset.setId(object.getInt("id")); dataset.setName(object.getString("name")); dataset.setDescription(object.getString("description")); dataset.setContributorId(object.getString("contributorId")); dataset.addVisibility(object.getString("visibility")); dataset.addAccessibility(object.getString("accessibility")); try { dataset.setReleasedDate(getZonedDateTime(object.get("releasedDate").toString())); } catch (IOException e) { log.warn("Error getting released date {}", e); dataset.setReleasedDate(null); } dataset.setCategoryId(object.getInt("categoryId")); dataset.setLicenseId(object.getInt("licenseId")); dataset.setContributor(invokeAndExtractUserInfo(dataset.getContributorId())); dataset.setCategory(invokeAndExtractCategoryInfo(dataset.getCategoryId())); dataset.setLicense(invokeAndExtractLicenseInfo(dataset.getLicenseId())); JSONArray resources = object.getJSONArray("resources"); for (int i = 0; i < resources.length(); i++) { JSONObject resource = resources.getJSONObject(i); DataResource dataResource = new DataResource(); dataResource.setId(resource.getLong("id")); dataResource.setUri(resource.getString("uri")); dataResource.setMalicious(resource.getBoolean("malicious")); dataResource.setScanned(resource.getBoolean("scanned")); dataset.addResource(dataResource); } JSONArray approvedUsers = object.getJSONArray("approvedUsers"); for (int i = 0; i < 
approvedUsers.length(); i++) { dataset.addApprovedUser(approvedUsers.getString(i)); } JSONArray keywords = object.getJSONArray("keywords"); List<String> keywordList = new ArrayList<>(); for (int i = 0; i < keywords.length(); i++) { keywordList.add(keywords.getString(i)); } dataset.setKeywordList(keywordList); return dataset; } protected DataCategory extractCategoryInfo(String json) { log.debug(json); DataCategory dataCategory = new DataCategory(); JSONObject object = new JSONObject(json); dataCategory.setId(object.getLong("id")); dataCategory.setName(object.getString("name")); dataCategory.setDescription(object.getString("description")); return dataCategory; } protected DataLicense extractLicenseInfo(String json) { log.debug(json); DataLicense dataLicense = new DataLicense(); JSONObject object = new JSONObject(json); dataLicense.setId(object.getLong("id")); dataLicense.setName(object.getString("name")); dataLicense.setAcronym(object.getString("acronym")); dataLicense.setDescription(object.getString("description")); dataLicense.setLink(object.getString("link")); return dataLicense; } protected DataCategory invokeAndExtractCategoryInfo(Integer categoryId) { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response; try { response = restTemplate.exchange(properties.getCategory(categoryId), HttpMethod.GET, request, String.class); } catch (Exception e) { log.warn("Data service not available to retrieve Category: {}", categoryId); return new DataCategory(); } return extractCategoryInfo(response.getBody().toString()); } protected DataLicense invokeAndExtractLicenseInfo(Integer licenseId) { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response; try { response = restTemplate.exchange(properties.getLicense(licenseId), HttpMethod.GET, request, String.class); } catch (Exception e) { log.warn("Data service not available to retrieve License: {}", licenseId); return new DataLicense(); } return 
extractLicenseInfo(response.getBody().toString()); } protected User2 invokeAndExtractUserInfo(String userId) { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response; try { response = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class); } catch (Exception e) { log.warn("User service not available to retrieve User: {}", userId); return new User2(); } return extractUserInfo(response.getBody().toString()); } private Team2 invokeAndExtractTeamInfo(String teamId) { HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity responseEntity = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, request, String.class); return extractTeamInfo(responseEntity.getBody().toString()); } private Experiment2 extractExperiment(String experimentJson) { log.info("{}", experimentJson); Experiment2 experiment2 = new Experiment2(); JSONObject object = new JSONObject(experimentJson); experiment2.setId(object.getLong("id")); experiment2.setUserId(object.getString("userId")); experiment2.setTeamId(object.getString(TEAM_ID)); experiment2.setTeamName(object.getString(TEAM_NAME)); experiment2.setName(object.getString("name")); experiment2.setDescription(object.getString("description")); experiment2.setNsFile(object.getString("nsFile")); experiment2.setNsFileContent(object.getString("nsFileContent")); experiment2.setIdleSwap(object.getInt("idleSwap")); experiment2.setMaxDuration(object.getInt("maxDuration")); try { experiment2.setCreatedDate(object.get("createdDate").toString()); } catch (Exception e) { experiment2.setCreatedDate(""); } try { experiment2.setLastModifiedDate(object.get("lastModifiedDate").toString()); } catch (Exception e) { experiment2.setLastModifiedDate(""); } return experiment2; } private Realization invokeAndExtractRealization(String teamName, Long id) { HttpEntity<String> request = createHttpEntityHeaderOnly(); restTemplate.setErrorHandler(new 
MyResponseErrorHandler()); ResponseEntity response = null; try { log.info("retrieving the latest exp status: {}", properties.getRealizationByTeam(teamName, id.toString())); response = restTemplate.exchange(properties.getRealizationByTeam(teamName, id.toString()), HttpMethod.GET, request, String.class); } catch (Exception e) { return getCleanRealization(); } String responseBody; if (response.getBody() == null) { return getCleanRealization(); } else { responseBody = response.getBody().toString(); } try { if (RestUtil.isError(response.getStatusCode())) { MyErrorResource error = objectMapper.readValue(responseBody, MyErrorResource.class); log.warn("error in retrieving realization for team: {}, realization: {}", teamName, id); return getCleanRealization(); } else { // will throw JSONException if the format return by sio is not a valid JSOn format // will occur if the realization details are still in the old format return extractRealization(responseBody); } } catch (IOException | JSONException e) { return getCleanRealization(); } } private Realization extractRealization(String json) { log.info("extracting realization: {}", json); Realization realization = new Realization(); JSONObject object = new JSONObject(json); realization.setExperimentId(object.getLong("experimentId")); realization.setExperimentName(object.getString("experimentName")); realization.setUserId(object.getString("userId")); realization.setTeamId(object.getString(TEAM_ID)); realization.setState(object.getString("state")); String exp_report = ""; Object expDetailsObject = object.get("details"); log.info("exp detail object: {}", expDetailsObject); if (expDetailsObject == JSONObject.NULL || expDetailsObject.toString().isEmpty()) { log.info("set details empty"); realization.setDetails(""); realization.setNumberOfNodes(0); } else { log.info("exp report to string: {}", expDetailsObject.toString()); exp_report = expDetailsObject.toString(); realization.setDetails(exp_report); JSONObject nodesInfoObject = new 
JSONObject(expDetailsObject.toString()); for (Object key : nodesInfoObject.keySet()) { Map<String, String> nodeDetails = new HashMap<>(); String nodeName = (String) key; JSONObject nodeDetailsJson = new JSONObject(nodesInfoObject.get(nodeName).toString()); nodeDetails.put("os", getValueFromJSONKey(nodeDetailsJson, "os")); nodeDetails.put("qualifiedName", getValueFromJSONKey(nodeDetailsJson, "qualifiedName")); nodeDetails.put(NODE_ID, getValueFromJSONKey(nodeDetailsJson, NODE_ID)); realization.addNodeDetails(nodeName, nodeDetails); } log.info("nodes info object: {}", nodesInfoObject); realization.setNumberOfNodes(nodesInfoObject.keySet().size()); } return realization; } // gets the value that corresponds to a particular key // checks if a particular key in the JSONObject exists // returns the value if the key exists, otherwise, returns N.A. private String getValueFromJSONKey(JSONObject json, String key) { if (json.has(key)) { return json.get(key).toString(); } return NOT_APPLICABLE; } /** * @param zonedDateTimeJSON JSON string * @return a date in the format MMM-d-yyyy */ protected String formatZonedDateTime(String zonedDateTimeJSON) throws Exception { ZonedDateTime zonedDateTime = getZonedDateTime(zonedDateTimeJSON); DateTimeFormatter format = DateTimeFormatter.ofPattern("MMM-d-yyyy"); return zonedDateTime.format(format); } protected ZonedDateTime getZonedDateTime(String zonedDateTimeJSON) throws IOException { ObjectMapper mapper = new ObjectMapper(); mapper.registerModule(new JavaTimeModule()); return mapper.readValue(zonedDateTimeJSON, ZonedDateTime.class); } /** * Creates a HttpEntity with a request body and header but no authorization header * To solve the expired jwt token * * @param jsonString The JSON request converted to string * @return A HttpEntity request * @see HttpEntity createHttpEntityHeaderOnly() for request with only header */ protected HttpEntity<String> createHttpEntityWithBodyNoAuthHeader(String jsonString) { HttpHeaders headers = new 
HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); return new HttpEntity<>(jsonString, headers); } /** * Creates a HttpEntity that contains only a header and empty body but no authorization header * To solve the expired jwt token * * @return A HttpEntity request * @see HttpEntity createHttpEntityWithBody() for request with both body and header */ protected HttpEntity<String> createHttpEntityHeaderOnlyNoAuthHeader() { HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); return new HttpEntity<>(headers); } /** * Creates a HttpEntity with a request body and header * * @param jsonString The JSON request converted to string * @return A HttpEntity request * @implNote Authorization header must be set to the JwTToken in the format [Bearer: TOKEN_ID] * @see HttpEntity createHttpEntityHeaderOnly() for request with only header */ protected HttpEntity<String> createHttpEntityWithBody(String jsonString) { HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); headers.set("Authorization", httpScopedSession.getAttribute(webProperties.getSessionJwtToken()).toString()); return new HttpEntity<>(jsonString, headers); } /** * Creates a HttpEntity that contains only a header and empty body * * @return A HttpEntity request * @implNote Authorization header must be set to the JwTToken in the format [Bearer: TOKEN_ID] * @see HttpEntity createHttpEntityWithBody() for request with both body and header */ protected HttpEntity<String> createHttpEntityHeaderOnly() { HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); headers.set("Authorization", httpScopedSession.getAttribute(webProperties.getSessionJwtToken()).toString()); return new HttpEntity<>(headers); } private void setSessionVariables(HttpSession session, String loginEmail, String id, String firstName, String userRoles, String token) { User2 user = invokeAndExtractUserInfo(id); 
session.setAttribute(webProperties.getSessionEmail(), loginEmail); session.setAttribute(webProperties.getSessionUserId(), id); session.setAttribute(webProperties.getSessionUserFirstName(), firstName); session.setAttribute(webProperties.getSessionRoles(), userRoles); session.setAttribute(webProperties.getSessionJwtToken(), "Bearer " + token); log.info("Session variables - sessionLoggedEmail: {}, id: {}, name: {}, roles: {}, token: {}", loginEmail, id, user.getFirstName(), userRoles, token); } private void removeSessionVariables(HttpSession session) { log.info("removing session variables: email: {}, userid: {}, user first name: {}", session.getAttribute(webProperties.getSessionEmail()), session.getAttribute(webProperties.getSessionUserId()), session.getAttribute(webProperties.getSessionUserFirstName())); session.removeAttribute(webProperties.getSessionEmail()); session.removeAttribute(webProperties.getSessionUserId()); session.removeAttribute(webProperties.getSessionUserFirstName()); session.removeAttribute(webProperties.getSessionRoles()); session.removeAttribute(webProperties.getSessionJwtToken()); session.invalidate(); } protected boolean validateIfAdmin(HttpSession session) { //log.info("User: {} is logged on as: {}", session.getAttribute(webProperties.getSessionEmail()), session.getAttribute(webProperties.getSessionRoles())); return session.getAttribute(webProperties.getSessionRoles()).equals(UserType.ADMIN.toString()); } /** * Ensure that only users of the team can realize or un-realize experiment * A pre-condition is that the users must be approved. * Teams must also be approved. 
* * @return the main experiment page */ private boolean checkPermissionRealizeExperiment(Realization realization, HttpSession session) { // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(session.getAttribute("id").toString()), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(userRespEntity.getBody().toString()); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = teamIdsJsonArray.get(i).toString(); if (teamId.equals(realization.getTeamId())) { return true; } } return false; } private String getTeamStatus(String teamId) { Team2 team = invokeAndExtractTeamInfo(teamId); return team.getStatus(); } private Realization getCleanRealization() { Realization realization = new Realization(); realization.setExperimentId(0L); realization.setExperimentName(""); realization.setUserId(""); realization.setTeamId(""); realization.setState(RealizationState.ERROR.toString()); realization.setDetails(""); realization.setNumberOfNodes(0); return realization; } /** * Computes the number of teams that the user is in and the number of running experiments to populate data for the user dashboard * * @return a map in the form teams: numberOfTeams, experiments: numberOfExperiments */ private Map<String, Integer> getUserDashboardStats(String userId) { int numberOfRunningExperiments = 0; Map<String, Integer> userDashboardStats = new HashMap<>(); // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(userRespEntity.getBody().toString()); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); int numberOfApprovedTeam = 0; for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = 
teamIdsJsonArray.get(i).toString(); HttpEntity<String> teamRequest = createHttpEntityHeaderOnly(); ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class); String teamResponseBody = teamResponse.getBody().toString(); if (!isMemberJoinRequestPending(userId, teamResponseBody)) { // get experiments lists of the teams HttpEntity<String> expRequest = createHttpEntityHeaderOnly(); ResponseEntity expRespEntity = restTemplate.exchange(properties.getExpListByTeamId(teamId), HttpMethod.GET, expRequest, String.class); JSONArray experimentsArray = new JSONArray(expRespEntity.getBody().toString()); numberOfRunningExperiments = getNumberOfRunningExperiments(numberOfRunningExperiments, experimentsArray); numberOfApprovedTeam ++; } } userDashboardStats.put(USER_DASHBOARD_APPROVED_TEAMS, numberOfApprovedTeam); userDashboardStats.put(USER_DASHBOARD_RUNNING_EXPERIMENTS, numberOfRunningExperiments); // userDashboardStats.put(USER_DASHBOARD_FREE_NODES, getNodes(NodeType.FREE)); return userDashboardStats; } private int getNumberOfRunningExperiments(int numberOfRunningExperiments, JSONArray experimentsArray) { for (int k = 0; k < experimentsArray.length(); k++) { Experiment2 experiment2 = extractExperiment(experimentsArray.getJSONObject(k).toString()); Realization realization = invokeAndExtractRealization(experiment2.getTeamName(), experiment2.getId()); if (realization.getState().equals(RealizationState.RUNNING.toString())) { numberOfRunningExperiments++; } } return numberOfRunningExperiments; } private SortedMap<String, Map<String, String>> getGlobalImages() throws IOException { SortedMap<String, Map<String, String>> globalImagesMap = new TreeMap<>(); log.info("Retrieving list of global images from: {}", properties.getGlobalImages()); try { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getGlobalImages(), HttpMethod.GET, request, 
String.class); ObjectMapper mapper = new ObjectMapper(); String json = new JSONObject(response.getBody().toString()).getString("images"); globalImagesMap = mapper.readValue(json, new TypeReference<SortedMap<String, Map<String, String>>>() { }); } catch (RestClientException e) { log.warn("Error connecting to service-image: {}", e); } return globalImagesMap; } private int getNodes(NodeType nodeType) { String nodesCount; log.info("Retrieving number of " + nodeType + " nodes from: {}", properties.getNodes(nodeType)); try { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getNodes(nodeType), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(response.getBody().toString()); nodesCount = object.getString(nodeType.name()); } catch (RestClientException e) { log.warn(ERROR_CONNECTING_TO_SERVICE_TELEMETRY, e); nodesCount = "0"; } return Integer.parseInt(nodesCount); } private List<TeamUsageInfo> getTeamsUsageStatisticsForUser(String userId) { List<TeamUsageInfo> usageInfoList = new ArrayList<>(); // get list of teamids HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity userRespEntity = restTemplate.exchange(properties.getUser(userId), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(userRespEntity.getBody().toString()); JSONArray teamIdsJsonArray = object.getJSONArray("teams"); // get team info by team id for (int i = 0; i < teamIdsJsonArray.length(); i++) { String teamId = teamIdsJsonArray.get(i).toString(); HttpEntity<String> teamRequest = createHttpEntityHeaderOnly(); ResponseEntity teamResponse = restTemplate.exchange(properties.getTeamById(teamId), HttpMethod.GET, teamRequest, String.class); String teamResponseBody = teamResponse.getBody().toString(); if (!isMemberJoinRequestPending(userId, teamResponseBody)) { TeamUsageInfo usageInfo = new TeamUsageInfo(); usageInfo.setId(teamId); usageInfo.setName(new 
JSONObject(teamResponseBody).getString("name")); usageInfo.setUsage(getUsageStatisticsByTeamId(teamId)); usageInfoList.add(usageInfo); } } return usageInfoList; } private String getUsageStatisticsByTeamId(String id) { log.info("Getting usage statistics for team {}", id); HttpEntity<String> request = createHttpEntityHeaderOnly(); ResponseEntity response; try { response = restTemplate.exchange(properties.getUsageStat(id), HttpMethod.GET, request, String.class); } catch (RestClientException e) { log.warn("Error connecting to sio get usage statistics {}", e); return "?"; } return response.getBody().toString(); } private TeamQuota extractTeamQuotaInfo(String responseBody) { JSONObject object = new JSONObject(responseBody); TeamQuota teamQuota = new TeamQuota(); Double charges = Double.parseDouble(accountingProperties.getCharges()); // amountUsed from SIO will never be null => not checking for null value String usage = object.getString("usage"); // getting usage in String BigDecimal amountUsed = new BigDecimal(usage); // using BigDecimal to handle currency amountUsed = amountUsed.multiply(new BigDecimal(charges)); // usage X charges //quota passed from SIO can be null , so we have to check for null value if (object.has("quota")) { Object budgetObject = object.optString("quota", null); if (budgetObject == null) { teamQuota.setBudget(""); // there is placeholder here teamQuota.setResourcesLeft("Unlimited"); // not placeholder so can pass string over } else { Double budgetInDouble = object.getDouble("quota"); // retrieve budget from SIO in Double BigDecimal budgetInBD = BigDecimal.valueOf(budgetInDouble); // handling currency using BigDecimal // calculate resoucesLeft BigDecimal resourceLeftInBD = budgetInBD.subtract(amountUsed); resourceLeftInBD = resourceLeftInBD.divide(new BigDecimal(charges), 0, BigDecimal.ROUND_DOWN); budgetInBD = budgetInBD.setScale(2, BigDecimal.ROUND_HALF_UP); // set budget teamQuota.setBudget(budgetInBD.toString()); //set resroucesLeft if 
(resourceLeftInBD.compareTo(BigDecimal.valueOf(0)) < 0) teamQuota.setResourcesLeft("0"); else teamQuota.setResourcesLeft(resourceLeftInBD.toString()); } } //set teamId and amountUsed teamQuota.setTeamId(object.getString(TEAM_ID)); amountUsed = amountUsed.setScale(2, BigDecimal.ROUND_HALF_UP); teamQuota.setAmountUsed(amountUsed.toString()); return teamQuota; } /** * Invokes the get nodes status in the telemetry service * @return a map containing a list of nodes status by their type */ private Map<String, List<Map<String, String>>> getNodesStatus() throws IOException { log.info("Getting all nodes' status from: {}", properties.getNodesStatus()); Map<String, List<Map<String, String>>> output = new HashMap<>(); try { HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getNodesStatus(), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(response.getBody().toString()); if (object == JSONObject.NULL || object.length() == 0) { return output; } else { // loop through the object as there may be more than one machine type for (int i = 0; i < object.names().length(); i++) { // for each machine type, get all the current nodes status String currentMachineType = object.names().getString(i); // converts the JSON Array of the form [ { id : A, status : B, type : C } ] into a proper list of map List<Map<String, String>> nodesList = objectMapper.readValue(object.getJSONArray(currentMachineType).toString(), new TypeReference<List<Map>>(){}); output.put(currentMachineType, nodesList); } } } catch (RestClientException e) { log.warn(ERROR_CONNECTING_TO_SERVICE_TELEMETRY, e); return new HashMap<>(); } log.info("Finish getting all nodes: {}", output); return output; } private Map<String,String> getTestbedStats() { Map<String, String> statsMap = new HashMap<>(); log.info("Retrieving number of logged in users and running experiments from: {}", properties.getTestbedStats()); try { 
HttpEntity<String> request = createHttpEntityHeaderOnlyNoAuthHeader(); ResponseEntity response = restTemplate.exchange(properties.getTestbedStats(), HttpMethod.GET, request, String.class); JSONObject object = new JSONObject(response.getBody().toString()); statsMap.put(USER_DASHBOARD_LOGGED_IN_USERS_COUNT, object.getString("users")); statsMap.put(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT, object.getString("experiments")); } catch (RestClientException e) { log.warn(ERROR_CONNECTING_TO_SERVICE_TELEMETRY, e); statsMap.put(USER_DASHBOARD_LOGGED_IN_USERS_COUNT, "0"); statsMap.put(USER_DASHBOARD_RUNNING_EXPERIMENTS_COUNT, "0"); } return statsMap; } }
add permission check to view experiment profile
src/main/java/sg/ncl/MainController.java
add permission check to view experiment profile
Java
apache-2.0
de4294517af7afcd3c8da71aecc84e223f34c67a
0
codehaus/mvel,codehaus/mvel
package org.mvel.optimizers.impl.asm; import org.mvel.*; import org.mvel.integration.VariableResolverFactory; import org.mvel.optimizers.AccessorCompiler; import org.mvel.optimizers.ExecutableStatement; import org.mvel.optimizers.OptimizationNotSupported; import org.mvel.util.ParseTools; import org.mvel.util.PropertyTools; import org.mvel.util.StringAppender; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.FieldVisitor; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import static org.objectweb.asm.Opcodes.*; import static org.objectweb.asm.Type.*; import java.lang.reflect.*; import java.util.*; public class ASMAccessorCompiler implements AccessorCompiler { private static final int OPCODES_VERSION; static { String javaVersion = System.getProperty("java.version"); if (javaVersion.startsWith("1.4")) OPCODES_VERSION = Opcodes.V1_4; else if (javaVersion.startsWith("1.5")) OPCODES_VERSION = Opcodes.V1_5; else if (javaVersion.startsWith("1.6") || javaVersion.startsWith("1.7")) OPCODES_VERSION = Opcodes.V1_6; else OPCODES_VERSION = Opcodes.V1_2; } private int start = 0; private int cursor = 0; private char[] property; private int length; private Object ctx; private Object thisRef; private VariableResolverFactory variableFactory; private static final int DONE = -1; private static final int BEAN = 0; private static final int METH = 1; private static final int COL = 2; private static final Object[] EMPTYARG = new Object[0]; private boolean first = true; private String className; private ClassWriter cw; private MethodVisitor mv; private Object val; private int stacksize = 1; private long time; private int inputs; private ArrayList<ExecutableStatement> compiledInputs; private Class returnType; public ASMAccessorCompiler(char[] property, Object ctx, Object thisRef, VariableResolverFactory variableResolverFactory) { this.property = property; this.ctx = ctx; this.variableFactory = variableResolverFactory; this.thisRef = thisRef; } public 
ASMAccessorCompiler() { } public Accessor compile(char[] property, Object staticContext, Object thisRef, VariableResolverFactory factory, boolean root) { time = System.currentTimeMillis(); inputs = 0; compiledInputs = new ArrayList<ExecutableStatement>(); start = cursor = 0; this.first = true; this.val = null; this.length = property.length; this.property = property; this.ctx = staticContext; this.thisRef = thisRef; this.variableFactory = factory; cw = new ClassWriter(ClassWriter.COMPUTE_MAXS + ClassWriter.COMPUTE_FRAMES); cw.visit(OPCODES_VERSION, Opcodes.ACC_PUBLIC + Opcodes.ACC_SUPER, className = "ASMAccessorImpl_" + String.valueOf(cw.hashCode()).replaceAll("\\-", "_"), null, "java/lang/Object", new String[]{"org/mvel/Accessor"}); MethodVisitor m = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null); m.visitCode(); m.visitVarInsn(Opcodes.ALOAD, 0); m.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/Object", "<init>", "()V"); m.visitInsn(Opcodes.RETURN); m.visitMaxs(1, 1); m.visitEnd(); mv = cw.visitMethod(ACC_PUBLIC, "getValue", "(Ljava/lang/Object;Ljava/lang/Object;Lorg/mvel/integration/VariableResolverFactory;)Ljava/lang/Object;", null, null); mv.visitCode(); return compileAccessor(); } public Accessor compileAccessor() { debug("\n{Initiate Compile: " + new String(property) + "}\n"); Object curr = ctx; try { while (cursor < length) { switch (nextToken()) { case BEAN: curr = getBeanProperty(curr, capture()); break; case METH: curr = getMethod(curr, capture()); break; case COL: curr = getCollectionProperty(curr, capture()); break; case DONE: break; } first = false; } val = curr; if (returnType != null && returnType.isPrimitive()) { //noinspection unchecked wrapPrimitive(returnType); } if (returnType == void.class) { debug("ACONST_NULL"); mv.visitInsn(ACONST_NULL); } debug("ARETURN"); mv.visitInsn(ARETURN); debug("\n{METHOD STATS (maxstack=" + stacksize + ")}\n"); mv.visitMaxs(stacksize, 1); mv.visitEnd(); buildInputs(); cw.visitEnd(); Class cls = 
loadClass(cw.toByteArray()); debug("[MVEL JIT Completed Optimization <<" + new String(property) + ">>]::" + cls + " (time: " + (System.currentTimeMillis() - time) + "ms)"); Accessor a; if (inputs == 0) { a = (Accessor) cls.newInstance(); } else { Class[] parms = new Class[inputs]; for (int i = 0; i < inputs; i++) { parms[i] = ExecutableStatement.class; } a = (Accessor) cls.getConstructor(parms).newInstance(compiledInputs.toArray(new ExecutableStatement[compiledInputs.size()])); } debug("[MVEL JIT Test Output: " + a.getValue(ctx, thisRef, variableFactory) + "]"); return a; } catch (InvocationTargetException e) { throw new PropertyAccessException("could not access property", e); } catch (IllegalAccessException e) { throw new PropertyAccessException("could not access property", e); } catch (IndexOutOfBoundsException e) { throw new PropertyAccessException("array or collection index out of bounds (property: " + new String(property) + ")", e); } catch (PropertyAccessException e) { throw new PropertyAccessException("failed to access property: <<" + new String(property) + ">> in: " + (ctx != null ? 
ctx.getClass() : null), e); } catch (CompileException e) { throw e; } catch (NullPointerException e) { throw new PropertyAccessException("null pointer exception in property: " + new String(property), e); } catch (OptimizationNotSupported e) { throw e; } catch (Exception e) { throw new PropertyAccessException("unknown exception in expression: " + new String(property), e); } } private int nextToken() { switch (property[start = cursor]) { case'[': return COL; case'.': cursor = ++start; } //noinspection StatementWithEmptyBody while (++cursor < length && Character.isJavaIdentifierPart(property[cursor])) ; if (cursor < length) { switch (property[cursor]) { case'[': return COL; case'(': return METH; default: return 0; } } return 0; } private String capture() { return new String(property, start, cursor - start); } private Object getBeanProperty(Object ctx, String property) throws IllegalAccessException, InvocationTargetException { debug("{bean: " + property + "}"); Class cls = (ctx instanceof Class ? ((Class) ctx) : ctx != null ? ctx.getClass() : null); Member member = cls != null ? 
PropertyTools.getFieldOrAccessor(cls, property) : null; if (first && variableFactory != null && variableFactory.isResolveable(property)) { try { debug("ALOAD 3"); mv.visitVarInsn(ALOAD, 3); debug("LDC :" + property); mv.visitLdcInsn(property); debug("INVOKEINTERFACE org/mvel/integration/VariableResolverFactory.getVariableResolver"); mv.visitMethodInsn(INVOKEINTERFACE, "org/mvel/integration/VariableResolverFactory", "getVariableResolver", "(Ljava/lang/String;)Lorg/mvel/integration/VariableResolver;"); debug("INVOKEINTERFACE org/mvel/integration/VariableResolver.getValue"); mv.visitMethodInsn(INVOKEINTERFACE, "org/mvel/integration/VariableResolver", "getValue", "()Ljava/lang/Object;"); } catch (Exception e) { throw new OptimizationFailure("critical error in JIT", e); } return variableFactory.getVariableResolver(property).getValue(); } else if (member instanceof Field) { Object o = ((Field) member).get(ctx); if (first) { debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); } debug("CHECKCAST " + getInternalName(cls)); mv.visitTypeInsn(CHECKCAST, getInternalName(cls)); debug("GETFIELD " + property + ":" + getDescriptor(((Field) member).getType())); mv.visitFieldInsn(GETFIELD, getInternalName(cls), property, getDescriptor(((Field) member).getType())); // addAccessorComponent(cls, property, FIELD, ((Field) member).getType()); return o; } else if (member != null) { if (first) { debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); } debug("CHECKCAST " + getInternalName(member.getDeclaringClass())); mv.visitTypeInsn(CHECKCAST, getInternalName(member.getDeclaringClass())); returnType = ((Method) member).getReturnType(); debug("INVOKEVIRTUAL " + member.getName() + ":" + returnType); mv.visitMethodInsn(INVOKEVIRTUAL, getInternalName(member.getDeclaringClass()), member.getName(), getMethodDescriptor((Method) member)); stacksize++; return ((Method) member).invoke(ctx, EMPTYARG); } else if (ctx instanceof Map && ((Map) ctx).containsKey(property)) { debug("CHECKCAST java/util/Map"); 
mv.visitTypeInsn(CHECKCAST, "java/util/Map"); debug("LDC: \"" + property + "\""); mv.visitLdcInsn(property); debug("INVOKEINTERFACE: get"); mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Map", "get", "(Ljava/lang/Object;)Ljava/lang/Object;"); return ((Map) ctx).get(property); } else if ("this".equals(property)) { debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); // load the thisRef value. return this.thisRef; } else if (Token.LITERALS.containsKey(property)) { return Token.LITERALS.get(property); } else { Class tryStaticMethodRef = tryStaticAccess(); if (tryStaticMethodRef != null) { throw new OptimizationNotSupported("class literal: " + tryStaticMethodRef); } else throw new PropertyAccessException("could not access property (" + property + ")"); } } private void whiteSpaceSkip() { if (cursor < length) //noinspection StatementWithEmptyBody while (Character.isWhitespace(property[cursor]) && ++cursor < length) ; } private boolean scanTo(char c) { for (; cursor < length; cursor++) { if (property[cursor] == c) { return true; } } return false; } private int containsStringLiteralTermination() { int pos = cursor; for (pos--; pos > 0; pos--) { if (property[pos] == '\'' || property[pos] == '"') return pos; else if (!Character.isWhitespace(property[pos])) return pos; } return -1; } /** * Handle accessing a property embedded in a collection, map, or array * * @param ctx - * @param prop - * @return - * @throws Exception - */ private Object getCollectionProperty(Object ctx, String prop) throws Exception { if (prop.length() > 0) ctx = getBeanProperty(ctx, prop); debug("{collection: " + prop + "} ctx=" + ctx); int start = ++cursor; whiteSpaceSkip(); if (cursor == length) throw new PropertyAccessException("unterminated '['"); String item; if (property[cursor] == '\'' || property[cursor] == '"') { start++; int end; if (!scanTo(']')) throw new PropertyAccessException("unterminated '['"); if ((end = containsStringLiteralTermination()) == -1) throw new PropertyAccessException("unterminated 
string literal in collection accessor"); item = new String(property, start, end - start); } else { if (!scanTo(']')) throw new PropertyAccessException("unterminated '['"); item = new String(property, start, cursor - start); } ++cursor; if (ctx instanceof Map) { debug("CHECKCAST java/util/Map"); mv.visitTypeInsn(CHECKCAST, "java/util/Map"); debug("LDC: \"" + item + "\""); mv.visitLdcInsn(item); debug("INVOKEINTERFACE: get"); mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Map", "get", "(Ljava/lang/Object;)Ljava/lang/Object;"); return ((Map) ctx).get(item); } else if (ctx instanceof List) { int index = Integer.parseInt(item); debug("CHECKCAST java/util/List"); mv.visitTypeInsn(CHECKCAST, "java/util/List"); debug("BIGPUSH: " + 6); mv.visitIntInsn(BIPUSH, index); debug("INVOKEINTERFACE: java/util/List.get"); mv.visitMethodInsn(INVOKEINTERFACE, "java/util/List", "get", "(I)Ljava/lang/Object;"); return ((List) ctx).get(index); } else if (ctx instanceof Collection) { int count = Integer.parseInt(item); if (count > ((Collection) ctx).size()) throw new PropertyAccessException("index [" + count + "] out of bounds on collection"); Iterator iter = ((Collection) ctx).iterator(); for (int i = 0; i < count; i++) iter.next(); return iter.next(); } else if (ctx instanceof Object[]) { int index = Integer.parseInt(item); debug("CHECKCAST [Ljava/lang/Object;"); mv.visitTypeInsn(CHECKCAST, "[Ljava/lang/Object;"); if (index < 6) { switch (index) { case 0: debug("ICONST_0"); mv.visitInsn(ICONST_0); break; case 1: debug("ICONST_1"); mv.visitInsn(ICONST_1); break; case 2: debug("ICONST_2"); mv.visitInsn(ICONST_2); break; case 3: debug("ICONST_3"); mv.visitInsn(ICONST_3); break; case 4: debug("ICONST_4"); mv.visitInsn(ICONST_4); break; case 5: debug("ICONST_5"); mv.visitInsn(ICONST_5); break; } } else { debug("BIPUSH " + index); mv.visitIntInsn(BIPUSH, index); } mv.visitInsn(AALOAD); return ((Object[]) ctx)[index]; } else if (ctx instanceof CharSequence) { int index = Integer.parseInt(item); 
mv.visitIntInsn(BIPUSH, index); mv.visitMethodInsn(INVOKEINTERFACE, "java/lang/CharSequence", "charAt", "(I)C"); return ((CharSequence) ctx).charAt(index); } else { throw new PropertyAccessException("illegal use of []: unknown type: " + (ctx == null ? null : ctx.getClass().getName())); } } private static final Map<String, ExecutableStatement[]> SUBEXPRESSION_CACHE = new WeakHashMap<String, ExecutableStatement[]>(); /** * Find an appropriate method, execute it, and return it's response. * * @param ctx - * @param name - * @return - * @throws Exception - */ @SuppressWarnings({"unchecked"}) private Object getMethod(Object ctx, String name) throws Exception { debug("{method: " + name + "}"); int st = cursor; int depth = 1; while (cursor++ < length - 1 && depth != 0) { switch (property[cursor]) { case'(': depth++; continue; case')': depth--; } } cursor--; String tk = (cursor - st) > 1 ? new String(property, st + 1, cursor - st - 1) : ""; cursor++; Object[] preConvArgs; Object[] args; ExecutableStatement[] es; if (tk.length() == 0) { //noinspection ZeroLengthArrayAllocation args = new Object[0]; //noinspection ZeroLengthArrayAllocation preConvArgs = new Object[0]; es = null; } else { if (SUBEXPRESSION_CACHE.containsKey(tk)) { es = SUBEXPRESSION_CACHE.get(tk); args = new Object[es.length]; preConvArgs = new Object[es.length]; for (int i = 0; i < es.length; i++) { preConvArgs[i] = args[i] = es[i].getValue(this.ctx, variableFactory); } } else { String[] subtokens = ParseTools.parseParameterList(tk.toCharArray(), 0, -1); es = new ExecutableStatement[subtokens.length]; args = new Object[subtokens.length]; preConvArgs = new Object[es.length]; for (int i = 0; i < subtokens.length; i++) { preConvArgs[i] = args[i] = (es[i] = (ExecutableStatement) ExpressionParser.compileExpression(subtokens[i])).getValue(this.ctx, variableFactory); } SUBEXPRESSION_CACHE.put(tk, es); } } if (es != null) { for (ExecutableStatement e : es) compiledInputs.add(e); } /** * If the target object is an 
instance of java.lang.Class itself then do not * adjust the Class scope target. */ Class cls = ctx instanceof Class ? (Class) ctx : ctx.getClass(); Method m; Class[] parameterTypes = null; /** * If we have not cached the method then we need to go ahead and try to resolve it. */ /** * Try to find an instance method from the class target. */ if ((m = ParseTools.getBestCanadidate(args, name, cls.getMethods())) != null) { parameterTypes = m.getParameterTypes(); } if (m == null) { /** * If we didn't find anything, maybe we're looking for the actual java.lang.Class methods. */ if ((m = ParseTools.getBestCanadidate(args, name, cls.getClass().getDeclaredMethods())) != null) { parameterTypes = m.getParameterTypes(); } } if (m == null) { StringAppender errorBuild = new StringAppender(); for (int i = 0; i < args.length; i++) { errorBuild.append(parameterTypes[i] != null ? parameterTypes[i].getClass().getName() : null); if (i < args.length - 1) errorBuild.append(", "); } throw new PropertyAccessException("unable to resolve method: " + cls.getName() + "." + name + "(" + errorBuild.toString() + ") [arglength=" + args.length + "]"); } else { if (es != null) { ExecutableStatement cExpr; for (int i = 0; i < es.length; i++) { cExpr = es[i]; if (cExpr.getKnownIngressType() == null) { cExpr.setKnownIngressType(parameterTypes[i]); cExpr.computeTypeConversionRule(); } if (!cExpr.isConvertableIngressEgress()) { args[i] = DataConversion.convert(args[i], parameterTypes[i]); } } } else { /** * Coerce any types if required. 
*/ for (int i = 0; i < args.length; i++) args[i] = DataConversion.convert(args[i], parameterTypes[i]); } if (first) { debug("ALOAD 1"); mv.visitVarInsn(ALOAD, 1); } if (m.getParameterTypes().length == 0) { if ((m.getModifiers() & Modifier.STATIC) != 0) { debug("INVOKESTATIC " + m.getName()); mv.visitMethodInsn(INVOKESTATIC, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } else { debug("CHECKCAST " + getInternalName(m.getDeclaringClass())); mv.visitTypeInsn(CHECKCAST, getInternalName(m.getDeclaringClass())); debug("INVOKEVIRTUAL " + m.getName()); mv.visitMethodInsn(INVOKEVIRTUAL, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } returnType = m.getReturnType(); stacksize++; } else { if ((m.getModifiers() & Modifier.STATIC) == 0) { debug("CHECKCAST " + getInternalName(cls)); mv.visitTypeInsn(CHECKCAST, getInternalName(cls)); } for (int i = 0; i < es.length; i++) { debug("ALOAD 0"); mv.visitVarInsn(ALOAD, 0); debug("GETFIELD p" + inputs++); mv.visitFieldInsn(GETFIELD, className, "p" + (inputs - 1), "Lorg/mvel/optimizers/ExecutableStatement;"); debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); debug("ALOAD 3"); mv.visitVarInsn(ALOAD, 3); debug("INVOKEINTERFACE ExecutableStatement.getValue"); mv.visitMethodInsn(INVOKEINTERFACE, getInternalName(ExecutableStatement.class), "getValue", "(Ljava/lang/Object;Lorg/mvel/integration/VariableResolverFactory;)Ljava/lang/Object;"); if (parameterTypes[i].isPrimitive()) { unwrapPrimitive(parameterTypes[i]); } else if (preConvArgs[i] == null || (parameterTypes[i] != String.class && !parameterTypes[i].isAssignableFrom(preConvArgs[i].getClass()))) { debug("LDC " + getType(parameterTypes[i])); mv.visitLdcInsn(getType(parameterTypes[i])); debug("INVOKESTATIC DataConversion.convert"); mv.visitMethodInsn(INVOKESTATIC, "org/mvel/DataConversion", "convert", "(Ljava/lang/Object;Ljava/lang/Class;)Ljava/lang/Object;"); debug("CHECKCAST " + getInternalName(parameterTypes[i])); 
mv.visitTypeInsn(CHECKCAST, getInternalName(parameterTypes[i])); } else if (parameterTypes[i] == String.class) { debug("<<<DYNAMIC TYPE OPTIMIZATION STRING>>"); mv.visitMethodInsn(INVOKESTATIC, "java/lang/String", "valueOf", "(Ljava/lang/Object;)Ljava/lang/String;"); } else { debug("<<<DYNAMIC TYPING BYPASS>>>"); debug("<<<OPT. JUSTIFICATION " + parameterTypes[i] + "=" + preConvArgs[i].getClass() + ">>>"); debug("CHECKCAST " + getInternalName(parameterTypes[i])); mv.visitTypeInsn(CHECKCAST, getInternalName(parameterTypes[i])); } stacksize += 3; } if ((m.getModifiers() & Modifier.STATIC) != 0) { debug("INVOKESTATIC: " + m.getName()); mv.visitMethodInsn(INVOKESTATIC, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } else { if (m.getDeclaringClass() != cls && m.getDeclaringClass().isInterface()) { debug("INVOKEINTERFACE: " + getInternalName(m.getDeclaringClass()) + "." + m.getName()); mv.visitMethodInsn(INVOKEINTERFACE, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } else { debug("INVOKEVIRTUAL: " + getInternalName(cls) + "." + m.getName()); mv.visitMethodInsn(INVOKEVIRTUAL, getInternalName(cls), m.getName(), getMethodDescriptor(m)); } } returnType = m.getReturnType(); stacksize++; } return m.invoke(ctx, args); } } private Class tryStaticAccess() { try { /** * Try to resolve this *smartly* as a static class reference. * * This starts at the end of the token and starts to step backwards to figure out whether * or not this may be a static class reference. We search for method calls simply by * inspecting for ()'s. The first union area we come to where no brackets are present is our * test-point for a class reference. If we find a class, we pass the reference to the * property accessor along with trailing methods (if any). 
* */ boolean meth = false; int depth = 0; int last = property.length; for (int i = property.length - 1; i > 0; i--) { switch (property[i]) { case'.': if (!meth) { return Class.forName(new String(property, 0, last)); } meth = false; last = i; break; case')': if (depth++ == 0) meth = true; break; case'(': depth--; break; } } } catch (Exception cnfe) { // do nothing. } return null; } private java.lang.Class loadClass(byte[] b) throws Exception { //override classDefine (as it is protected) and define the class. Class clazz = null; ClassLoader loader = ClassLoader.getSystemClassLoader(); Class cls = Class.forName("java.lang.ClassLoader"); java.lang.reflect.Method method = cls.getDeclaredMethod("defineClass", new Class[]{String.class, byte[].class, int.class, int.class}); // protected method invocaton method.setAccessible(true); try { Object[] args = new Object[]{className, b, 0, (b.length)}; clazz = (Class) method.invoke(loader, args); } finally { method.setAccessible(false); } return clazz; } public static void debug(String instruction) { assert ParseTools.debug(instruction); } public String getName() { return "ASM"; } public Object getResultOptPass() { return val; } private void unwrapPrimitive(Class cls) { if (cls == boolean.class) { debug("CHECKCAST java/lang/Boolean"); mv.visitTypeInsn(CHECKCAST, "java/lang/Boolean"); debug("INVOKEVIRTUAL java/lang/Boolean.booleanValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Boolean", "booleanValue", "()Z"); } else if (cls == int.class) { debug("CHECKCAST java/lang/Integer"); mv.visitTypeInsn(CHECKCAST, "java/lang/Integer"); debug("INVOKEVIRTUAL java/lang/Integer.intValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Integer", "intValue", "()I"); } else if (cls == float.class) { debug("CHECKCAST java/lang/Float"); mv.visitTypeInsn(CHECKCAST, "java/lang/Float"); debug("INVOKEVIRTUAL java/lang/Float.floatValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Float", "floatValue", "()F"); } else if (cls == double.class) { 
debug("CHECKCAST java/lang/Double"); mv.visitTypeInsn(CHECKCAST, "java/lang/Double"); debug("INVOKEVIRTUAL java/lang/Double.doubleValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Double", "doubleValue", "()D"); } else if (cls == short.class) { debug("CHECKCAST java/lang/Short"); mv.visitTypeInsn(CHECKCAST, "java/lang/Short"); debug("INVOKEVIRTUAL java/lang/Short.shortValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Short", "shortValue", "()S"); } else if (cls == long.class) { debug("CHECKCAST java/lang/Long"); mv.visitTypeInsn(CHECKCAST, "java/lang/Long"); debug("INVOKEVIRTUAL java/lang/Long.longValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Long", "longValue", "()L"); } else if (cls == byte.class) { debug("CHECKCAST java/lang/Byte"); mv.visitTypeInsn(CHECKCAST, "java/lang/Byte"); debug("INVOKEVIRTUAL java/lang/Byte.byteValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Byte", "byteValue", "()B"); } else if (cls == char.class) { debug("CHECKCAST java/lang/Character"); mv.visitTypeInsn(CHECKCAST, "java/lang/Character"); debug("INVOKEVIRTUAL java/lang/Character.charValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Character", "charValue", "()C"); } } private void wrapPrimitive(Class<? 
extends Object> cls) { if (cls == boolean.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;"); } else if (cls == int.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;"); } else if (cls == float.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Float", "valueOf", "(F)Ljava/lang/Float;"); } else if (cls == double.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Double", "valueOf", "(D)Ljava/lang/Double;"); } else if (cls == short.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Short", "valueOf", "(S)Ljava/lang/Short;"); } else if (cls == long.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Long", "valueOf", "(J)Ljava/lang/Long;"); } else if (cls == byte.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Byte", "valueOf", "(B)Ljava/lang/Byte;"); } else if (cls == char.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Character", "valueOf", "(C)Ljava/lang/Character;"); } } public void buildInputs() { if (inputs == 0) return; debug("\n{SETTING UP MEMBERS...}\n"); StringAppender constSig = new StringAppender("("); int size = inputs; for (int i = 0; i < size; i++) { debug("ACC_PRIVATE p" + i); FieldVisitor fv = cw.visitField(ACC_PRIVATE, "p" + i, "Lorg/mvel/optimizers/ExecutableStatement;", null, null); fv.visitEnd(); constSig.append("Lorg/mvel/optimizers/ExecutableStatement;"); } constSig.append(")V"); debug("\n{CREATING INJECTION CONSTRUCTOR}\n"); MethodVisitor cv = cw.visitMethod(ACC_PUBLIC, "<init>", constSig.toString(), null, null); cv.visitCode(); debug("ALOAD 0"); cv.visitVarInsn(ALOAD, 0); debug("INVOKESPECIAL java/lang/Object.<init>"); cv.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V"); for (int i = 0; i < size; i++) { debug("ALOAD 0"); cv.visitVarInsn(ALOAD, 0); debug("ALOAD " + (i + 1)); cv.visitVarInsn(ALOAD, i + 1); debug("PUTFIELD p" + i); cv.visitFieldInsn(PUTFIELD, className, "p" + i, 
"Lorg/mvel/optimizers/ExecutableStatement;"); } debug("RETURN"); cv.visitInsn(RETURN); cv.visitMaxs(0, 0); cv.visitEnd(); debug("}"); } }
src/main/java/org/mvel/optimizers/impl/asm/ASMAccessorCompiler.java
package org.mvel.optimizers.impl.asm; import org.mvel.*; import org.mvel.integration.VariableResolverFactory; import org.mvel.optimizers.AccessorCompiler; import org.mvel.optimizers.ExecutableStatement; import org.mvel.optimizers.OptimizationNotSupported; import org.mvel.util.ParseTools; import org.mvel.util.PropertyTools; import org.mvel.util.StringAppender; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.FieldVisitor; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import static org.objectweb.asm.Opcodes.*; import static org.objectweb.asm.Type.*; import java.lang.reflect.*; import java.util.*; public class ASMAccessorCompiler implements AccessorCompiler { private static final int OPCODES_VERSION; static { String javaVersion = System.getProperty("java.version"); if (javaVersion.startsWith("1.4")) OPCODES_VERSION = Opcodes.V1_4; else if (javaVersion.startsWith("1.5")) OPCODES_VERSION = Opcodes.V1_5; else if (javaVersion.startsWith("1.6") || javaVersion.startsWith("1.7")) OPCODES_VERSION = Opcodes.V1_6; else OPCODES_VERSION = Opcodes.V1_2; } private int start = 0; private int cursor = 0; private char[] property; private int length; private Object ctx; private Object thisRef; private VariableResolverFactory variableFactory; private static final int DONE = -1; private static final int BEAN = 0; private static final int METH = 1; private static final int COL = 2; private static final Object[] EMPTYARG = new Object[0]; private boolean first = true; private String className; private ClassWriter cw; private MethodVisitor mv; private Object val; private int stacksize = 1; private long time; private int inputs; private ArrayList<ExecutableStatement> compiledInputs; private Class returnType; public ASMAccessorCompiler(char[] property, Object ctx, Object thisRef, VariableResolverFactory variableResolverFactory) { this.property = property; this.ctx = ctx; this.variableFactory = variableResolverFactory; this.thisRef = thisRef; } public 
ASMAccessorCompiler() { } public Accessor compile(char[] property, Object staticContext, Object thisRef, VariableResolverFactory factory, boolean root) { time = System.currentTimeMillis(); inputs = 0; compiledInputs = new ArrayList<ExecutableStatement>(); start = cursor = 0; this.first = true; this.val = null; this.length = property.length; this.property = property; this.ctx = staticContext; this.thisRef = thisRef; this.variableFactory = factory; cw = new ClassWriter(ClassWriter.COMPUTE_MAXS + ClassWriter.COMPUTE_FRAMES); cw.visit(OPCODES_VERSION, Opcodes.ACC_PUBLIC + Opcodes.ACC_SUPER, className = "ASMAccessorImpl_" + String.valueOf(cw.hashCode()).replaceAll("\\-", "_"), null, "java/lang/Object", new String[]{"org/mvel/Accessor"}); MethodVisitor m = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null); m.visitCode(); m.visitVarInsn(Opcodes.ALOAD, 0); m.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/Object", "<init>", "()V"); m.visitInsn(Opcodes.RETURN); m.visitMaxs(1, 1); m.visitEnd(); mv = cw.visitMethod(ACC_PUBLIC, "getValue", "(Ljava/lang/Object;Ljava/lang/Object;Lorg/mvel/integration/VariableResolverFactory;)Ljava/lang/Object;", null, null); mv.visitCode(); return compileAccessor(); } public Accessor compileAccessor() { debug("\n{Initiate Compile: " + new String(property) + "}\n"); Object curr = ctx; try { while (cursor < length) { switch (nextToken()) { case BEAN: curr = getBeanProperty(curr, capture()); break; case METH: curr = getMethod(curr, capture()); break; case COL: curr = getCollectionProperty(curr, capture()); break; case DONE: break; } first = false; } val = curr; if (returnType != null && returnType.isPrimitive()) { //noinspection unchecked wrapPrimitive(returnType); } if (returnType == void.class) { debug("ACONST_NULL"); mv.visitInsn(ACONST_NULL); } debug("ARETURN"); mv.visitInsn(ARETURN); debug("\n{METHOD STATS (maxstack=" + stacksize + ")}\n"); mv.visitMaxs(stacksize, 1); mv.visitEnd(); buildInputs(); cw.visitEnd(); Class cls = 
loadClass(cw.toByteArray()); debug("[MVEL JIT Completed Optimization <<" + new String(property) + ">>]::" + cls + " (time: " + (System.currentTimeMillis() - time) + "ms)"); Accessor a; if (inputs == 0) { a = (Accessor) cls.newInstance(); } else { Class[] parms = new Class[inputs]; for (int i = 0; i < inputs; i++) { parms[i] = ExecutableStatement.class; } a = (Accessor) cls.getConstructor(parms).newInstance(compiledInputs.toArray(new ExecutableStatement[compiledInputs.size()])); } debug("[MVEL JIT Test Output: " + a.getValue(ctx, thisRef, variableFactory) + "]"); return a; } catch (InvocationTargetException e) { throw new PropertyAccessException("could not access property", e); } catch (IllegalAccessException e) { throw new PropertyAccessException("could not access property", e); } catch (IndexOutOfBoundsException e) { throw new PropertyAccessException("array or collection index out of bounds (property: " + new String(property) + ")", e); } catch (PropertyAccessException e) { throw new PropertyAccessException("failed to access property: <<" + new String(property) + ">> in: " + (ctx != null ? 
ctx.getClass() : null), e); } catch (CompileException e) { throw e; } catch (NullPointerException e) { throw new PropertyAccessException("null pointer exception in property: " + new String(property), e); } catch (OptimizationNotSupported e) { throw e; } catch (Exception e) { throw new PropertyAccessException("unknown exception in expression: " + new String(property), e); } } private int nextToken() { switch (property[start = cursor]) { case'[': return COL; case'.': cursor = ++start; } //noinspection StatementWithEmptyBody while (++cursor < length && Character.isJavaIdentifierPart(property[cursor])) ; if (cursor < length) { switch (property[cursor]) { case'[': return COL; case'(': return METH; default: return 0; } } return 0; } private String capture() { return new String(property, start, cursor - start); } private Object getBeanProperty(Object ctx, String property) throws IllegalAccessException, InvocationTargetException { debug("{bean: " + property + "}"); Class cls = (ctx instanceof Class ? ((Class) ctx) : ctx != null ? ctx.getClass() : null); Member member = cls != null ? 
PropertyTools.getFieldOrAccessor(cls, property) : null; if (first && variableFactory != null && variableFactory.isResolveable(property)) { try { debug("ALOAD 3"); mv.visitVarInsn(ALOAD, 3); debug("LDC :" + property); mv.visitLdcInsn(property); debug("INVOKEINTERFACE org/mvel/integration/VariableResolverFactory.getVariableResolver"); mv.visitMethodInsn(INVOKEINTERFACE, "org/mvel/integration/VariableResolverFactory", "getVariableResolver", "(Ljava/lang/String;)Lorg/mvel/integration/VariableResolver;"); debug("INVOKEINTERFACE org/mvel/integration/VariableResolver.getValue"); mv.visitMethodInsn(INVOKEINTERFACE, "org/mvel/integration/VariableResolver", "getValue", "()Ljava/lang/Object;"); } catch (Exception e) { throw new OptimizationFailure("critical error in JIT", e); } return variableFactory.getVariableResolver(property).getValue(); } else if (member instanceof Field) { Object o = ((Field) member).get(ctx); if (first) { debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); } debug("CHECKCAST " + getInternalName(cls)); mv.visitTypeInsn(CHECKCAST, getInternalName(cls)); debug("GETFIELD " + property + ":" + getDescriptor(((Field) member).getType())); mv.visitFieldInsn(GETFIELD, getInternalName(cls), property, getDescriptor(((Field) member).getType())); // addAccessorComponent(cls, property, FIELD, ((Field) member).getType()); return o; } else if (member != null) { if (first) { debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); } debug("CHECKCAST " + getInternalName(member.getDeclaringClass())); mv.visitTypeInsn(CHECKCAST, getInternalName(member.getDeclaringClass())); returnType = ((Method) member).getReturnType(); debug("INVOKEVIRTUAL " + member.getName() + ":" + returnType); mv.visitMethodInsn(INVOKEVIRTUAL, getInternalName(member.getDeclaringClass()), member.getName(), getMethodDescriptor((Method) member)); stacksize++; return ((Method) member).invoke(ctx, EMPTYARG); } else if (ctx instanceof Map && ((Map) ctx).containsKey(property)) { debug("CHECKCAST java/util/Map"); 
mv.visitTypeInsn(CHECKCAST, "java/util/Map"); debug("LDC: \"" + property + "\""); mv.visitLdcInsn(property); debug("INVOKEINTERFACE: get"); mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Map", "get", "(Ljava/lang/Object;)Ljava/lang/Object;"); return ((Map) ctx).get(property); } else if ("this".equals(property)) { debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); // load the thisRef value. return this.thisRef; } else if (Token.LITERALS.containsKey(property)) { return Token.LITERALS.get(property); } else { Class tryStaticMethodRef = tryStaticAccess(); if (tryStaticMethodRef != null) { throw new OptimizationNotSupported("class literal: " + tryStaticMethodRef); } else throw new PropertyAccessException("could not access property (" + property + ")"); } } private void whiteSpaceSkip() { if (cursor < length) //noinspection StatementWithEmptyBody while (Character.isWhitespace(property[cursor]) && ++cursor < length) ; } private boolean scanTo(char c) { for (; cursor < length; cursor++) { if (property[cursor] == c) { return true; } } return false; } private int containsStringLiteralTermination() { int pos = cursor; for (pos--; pos > 0; pos--) { if (property[pos] == '\'' || property[pos] == '"') return pos; else if (!Character.isWhitespace(property[pos])) return pos; } return -1; } /** * Handle accessing a property embedded in a collection, map, or array * * @param ctx - * @param prop - * @return - * @throws Exception - */ private Object getCollectionProperty(Object ctx, String prop) throws Exception { if (prop.length() > 0) ctx = getBeanProperty(ctx, prop); debug("{collection: " + prop + "} ctx=" + ctx); int start = ++cursor; whiteSpaceSkip(); if (cursor == length) throw new PropertyAccessException("unterminated '['"); String item; if (property[cursor] == '\'' || property[cursor] == '"') { start++; int end; if (!scanTo(']')) throw new PropertyAccessException("unterminated '['"); if ((end = containsStringLiteralTermination()) == -1) throw new PropertyAccessException("unterminated 
string literal in collection accessor"); item = new String(property, start, end - start); } else { if (!scanTo(']')) throw new PropertyAccessException("unterminated '['"); item = new String(property, start, cursor - start); } ++cursor; if (ctx instanceof Map) { debug("CHECKCAST java/util/Map"); mv.visitTypeInsn(CHECKCAST, "java/util/Map"); debug("LDC: \"" + item + "\""); mv.visitLdcInsn(item); debug("INVOKEINTERFACE: get"); mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Map", "get", "(Ljava/lang/Object;)Ljava/lang/Object;"); return ((Map) ctx).get(item); } else if (ctx instanceof List) { int index = Integer.parseInt(item); debug("CHECKCAST java/util/List"); mv.visitTypeInsn(CHECKCAST, "java/util/List"); debug("BIGPUSH: " + 6); mv.visitIntInsn(BIPUSH, index); debug("INVOKEINTERFACE: java/util/List.get"); mv.visitMethodInsn(INVOKEINTERFACE, "java/util/List", "get", "(I)Ljava/lang/Object;"); return ((List) ctx).get(index); } else if (ctx instanceof Collection) { int count = Integer.parseInt(item); if (count > ((Collection) ctx).size()) throw new PropertyAccessException("index [" + count + "] out of bounds on collection"); Iterator iter = ((Collection) ctx).iterator(); for (int i = 0; i < count; i++) iter.next(); return iter.next(); } else if (ctx instanceof Object[]) { int index = Integer.parseInt(item); debug("CHECKCAST [Ljava/lang/Object;"); mv.visitTypeInsn(CHECKCAST, "[Ljava/lang/Object;"); if (index < 6) { switch (index) { case 0: debug("ICONST_0"); mv.visitInsn(ICONST_0); break; case 1: debug("ICONST_1"); mv.visitInsn(ICONST_1); break; case 2: debug("ICONST_2"); mv.visitInsn(ICONST_2); break; case 3: debug("ICONST_3"); mv.visitInsn(ICONST_3); break; case 4: debug("ICONST_4"); mv.visitInsn(ICONST_4); break; case 5: debug("ICONST_5"); mv.visitInsn(ICONST_5); break; } } else { debug("BIPUSH " + index); mv.visitIntInsn(BIPUSH, index); } mv.visitInsn(AALOAD); return ((Object[]) ctx)[index]; } else if (ctx instanceof CharSequence) { int index = Integer.parseInt(item); 
mv.visitIntInsn(BIPUSH, index); mv.visitMethodInsn(INVOKEINTERFACE, "java/lang/CharSequence", "charAt", "(I)C"); return ((CharSequence) ctx).charAt(index); } else { throw new PropertyAccessException("illegal use of []: unknown type: " + (ctx == null ? null : ctx.getClass().getName())); } } private static final Map<String, ExecutableStatement[]> SUBEXPRESSION_CACHE = new WeakHashMap<String, ExecutableStatement[]>(); /** * Find an appropriate method, execute it, and return it's response. * * @param ctx - * @param name - * @return - * @throws Exception - */ @SuppressWarnings({"unchecked"}) private Object getMethod(Object ctx, String name) throws Exception { debug("{method: " + name + "}"); int st = cursor; int depth = 1; while (cursor++ < length - 1 && depth != 0) { switch (property[cursor]) { case'(': depth++; continue; case')': depth--; } } cursor--; String tk = (cursor - st) > 1 ? new String(property, st + 1, cursor - st - 1) : ""; cursor++; Object[] preConvArgs; Object[] args; ExecutableStatement[] es; if (tk.length() == 0) { //noinspection ZeroLengthArrayAllocation args = new Object[0]; //noinspection ZeroLengthArrayAllocation preConvArgs = new Object[0]; es = null; } else { if (SUBEXPRESSION_CACHE.containsKey(tk)) { es = SUBEXPRESSION_CACHE.get(tk); args = new Object[es.length]; preConvArgs = new Object[es.length]; for (int i = 0; i < es.length; i++) { preConvArgs[i] = args[i] = es[i].getValue(this.ctx, variableFactory); } } else { String[] subtokens = ParseTools.parseParameterList(tk.toCharArray(), 0, -1); es = new ExecutableStatement[subtokens.length]; args = new Object[subtokens.length]; preConvArgs = new Object[es.length]; for (int i = 0; i < subtokens.length; i++) { preConvArgs[i] = args[i] = (es[i] = (ExecutableStatement) ExpressionParser.compileExpression(subtokens[i])).getValue(this.ctx, variableFactory); } SUBEXPRESSION_CACHE.put(tk, es); } } if (es != null) { for (ExecutableStatement e : es) compiledInputs.add(e); } /** * If the target object is an 
instance of java.lang.Class itself then do not * adjust the Class scope target. */ Class cls = ctx instanceof Class ? (Class) ctx : ctx.getClass(); Method m; Class[] parameterTypes = null; /** * If we have not cached the method then we need to go ahead and try to resolve it. */ /** * Try to find an instance method from the class target. */ if ((m = ParseTools.getBestCanadidate(args, name, cls.getMethods())) != null) { parameterTypes = m.getParameterTypes(); } if (m == null) { /** * If we didn't find anything, maybe we're looking for the actual java.lang.Class methods. */ if ((m = ParseTools.getBestCanadidate(args, name, cls.getClass().getDeclaredMethods())) != null) { parameterTypes = m.getParameterTypes(); } } if (m == null) { StringAppender errorBuild = new StringAppender(); for (int i = 0; i < args.length; i++) { errorBuild.append(args[i] != null ? args[i].getClass().getName() : null); if (i < args.length - 1) errorBuild.append(", "); } throw new PropertyAccessException("unable to resolve method: " + cls.getName() + "." + name + "(" + errorBuild.toString() + ") [arglength=" + args.length + "]"); } else { if (es != null) { ExecutableStatement cExpr; for (int i = 0; i < es.length; i++) { cExpr = es[i]; if (cExpr.getKnownIngressType() == null) { cExpr.setKnownIngressType(parameterTypes[i]); cExpr.computeTypeConversionRule(); } if (!cExpr.isConvertableIngressEgress()) { args[i] = DataConversion.convert(args[i], parameterTypes[i]); } } } else { /** * Coerce any types if required. 
*/ for (int i = 0; i < args.length; i++) args[i] = DataConversion.convert(args[i], parameterTypes[i]); } if (first) { debug("ALOAD 1"); mv.visitVarInsn(ALOAD, 1); } if (m.getParameterTypes().length == 0) { if ((m.getModifiers() & Modifier.STATIC) != 0) { debug("INVOKESTATIC " + m.getName()); mv.visitMethodInsn(INVOKESTATIC, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } else { debug("CHECKCAST " + getInternalName(m.getDeclaringClass())); mv.visitTypeInsn(CHECKCAST, getInternalName(m.getDeclaringClass())); debug("INVOKEVIRTUAL " + m.getName()); mv.visitMethodInsn(INVOKEVIRTUAL, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } returnType = m.getReturnType(); stacksize++; } else { if ((m.getModifiers() & Modifier.STATIC) == 0) { debug("CHECKCAST " + getInternalName(cls)); mv.visitTypeInsn(CHECKCAST, getInternalName(cls)); } for (int i = 0; i < es.length; i++) { debug("ALOAD 0"); mv.visitVarInsn(ALOAD, 0); debug("GETFIELD p" + inputs++); mv.visitFieldInsn(GETFIELD, className, "p" + (inputs - 1), "Lorg/mvel/optimizers/ExecutableStatement;"); debug("ALOAD 2"); mv.visitVarInsn(ALOAD, 2); debug("ALOAD 3"); mv.visitVarInsn(ALOAD, 3); debug("INVOKEINTERFACE ExecutableStatement.getValue"); mv.visitMethodInsn(INVOKEINTERFACE, getInternalName(ExecutableStatement.class), "getValue", "(Ljava/lang/Object;Lorg/mvel/integration/VariableResolverFactory;)Ljava/lang/Object;"); if (parameterTypes[i].isPrimitive()) { unwrapPrimitive(parameterTypes[i]); } else if (preConvArgs[i] == null || (parameterTypes[i] != String.class && !parameterTypes[i].isAssignableFrom(preConvArgs[i].getClass()))) { debug("LDC " + getType(parameterTypes[i])); mv.visitLdcInsn(getType(parameterTypes[i])); debug("INVOKESTATIC DataConversion.convert"); mv.visitMethodInsn(INVOKESTATIC, "org/mvel/DataConversion", "convert", "(Ljava/lang/Object;Ljava/lang/Class;)Ljava/lang/Object;"); debug("CHECKCAST " + getInternalName(parameterTypes[i])); 
mv.visitTypeInsn(CHECKCAST, getInternalName(parameterTypes[i])); } else if (parameterTypes[i] == String.class) { debug("<<<DYNAMIC TYPE OPTIMIZATION STRING>>"); mv.visitMethodInsn(INVOKESTATIC, "java/lang/String", "valueOf", "(Ljava/lang/Object;)Ljava/lang/String;"); } else { debug("<<<DYNAMIC TYPING BYPASS>>>"); debug("<<<OPT. JUSTIFICATION " + parameterTypes[i] + "=" + preConvArgs[i].getClass() + ">>>"); debug("CHECKCAST " + getInternalName(parameterTypes[i])); mv.visitTypeInsn(CHECKCAST, getInternalName(parameterTypes[i])); } stacksize += 3; } if ((m.getModifiers() & Modifier.STATIC) != 0) { debug("INVOKESTATIC: " + m.getName()); mv.visitMethodInsn(INVOKESTATIC, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } else { if (m.getDeclaringClass() != cls && m.getDeclaringClass().isInterface()) { debug("INVOKEINTERFACE: " + getInternalName(m.getDeclaringClass()) + "." + m.getName()); mv.visitMethodInsn(INVOKEINTERFACE, getInternalName(m.getDeclaringClass()), m.getName(), getMethodDescriptor(m)); } else { debug("INVOKEVIRTUAL: " + getInternalName(cls) + "." + m.getName()); mv.visitMethodInsn(INVOKEVIRTUAL, getInternalName(cls), m.getName(), getMethodDescriptor(m)); } } returnType = m.getReturnType(); stacksize++; } return m.invoke(ctx, args); } } private Class tryStaticAccess() { try { /** * Try to resolve this *smartly* as a static class reference. * * This starts at the end of the token and starts to step backwards to figure out whether * or not this may be a static class reference. We search for method calls simply by * inspecting for ()'s. The first union area we come to where no brackets are present is our * test-point for a class reference. If we find a class, we pass the reference to the * property accessor along with trailing methods (if any). 
* */ boolean meth = false; int depth = 0; int last = property.length; for (int i = property.length - 1; i > 0; i--) { switch (property[i]) { case'.': if (!meth) { return Class.forName(new String(property, 0, last)); } meth = false; last = i; break; case')': if (depth++ == 0) meth = true; break; case'(': depth--; break; } } } catch (Exception cnfe) { // do nothing. } return null; } private java.lang.Class loadClass(byte[] b) throws Exception { //override classDefine (as it is protected) and define the class. Class clazz = null; ClassLoader loader = ClassLoader.getSystemClassLoader(); Class cls = Class.forName("java.lang.ClassLoader"); java.lang.reflect.Method method = cls.getDeclaredMethod("defineClass", new Class[]{String.class, byte[].class, int.class, int.class}); // protected method invocaton method.setAccessible(true); try { Object[] args = new Object[]{className, b, 0, (b.length)}; clazz = (Class) method.invoke(loader, args); } finally { method.setAccessible(false); } return clazz; } public static void debug(String instruction) { assert ParseTools.debug(instruction); } public String getName() { return "ASM"; } public Object getResultOptPass() { return val; } private void unwrapPrimitive(Class cls) { if (cls == boolean.class) { debug("CHECKCAST java/lang/Boolean"); mv.visitTypeInsn(CHECKCAST, "java/lang/Boolean"); debug("INVOKEVIRTUAL java/lang/Boolean.booleanValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Boolean", "booleanValue", "()Z"); } else if (cls == int.class) { debug("CHECKCAST java/lang/Integer"); mv.visitTypeInsn(CHECKCAST, "java/lang/Integer"); debug("INVOKEVIRTUAL java/lang/Integer.intValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Integer", "intValue", "()I"); } else if (cls == float.class) { debug("CHECKCAST java/lang/Float"); mv.visitTypeInsn(CHECKCAST, "java/lang/Float"); debug("INVOKEVIRTUAL java/lang/Float.floatValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Float", "floatValue", "()F"); } else if (cls == double.class) { 
debug("CHECKCAST java/lang/Double"); mv.visitTypeInsn(CHECKCAST, "java/lang/Double"); debug("INVOKEVIRTUAL java/lang/Double.doubleValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Double", "doubleValue", "()D"); } else if (cls == short.class) { debug("CHECKCAST java/lang/Short"); mv.visitTypeInsn(CHECKCAST, "java/lang/Short"); debug("INVOKEVIRTUAL java/lang/Short.shortValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Short", "shortValue", "()S"); } else if (cls == long.class) { debug("CHECKCAST java/lang/Long"); mv.visitTypeInsn(CHECKCAST, "java/lang/Long"); debug("INVOKEVIRTUAL java/lang/Long.longValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Long", "longValue", "()L"); } else if (cls == byte.class) { debug("CHECKCAST java/lang/Byte"); mv.visitTypeInsn(CHECKCAST, "java/lang/Byte"); debug("INVOKEVIRTUAL java/lang/Byte.byteValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Byte", "byteValue", "()B"); } else if (cls == char.class) { debug("CHECKCAST java/lang/Character"); mv.visitTypeInsn(CHECKCAST, "java/lang/Character"); debug("INVOKEVIRTUAL java/lang/Character.charValue"); mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Character", "charValue", "()C"); } } private void wrapPrimitive(Class<? 
extends Object> cls) { if (cls == boolean.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;"); } else if (cls == int.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;"); } else if (cls == float.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Float", "valueOf", "(F)Ljava/lang/Float;"); } else if (cls == double.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Double", "valueOf", "(D)Ljava/lang/Double;"); } else if (cls == short.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Short", "valueOf", "(S)Ljava/lang/Short;"); } else if (cls == long.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Long", "valueOf", "(J)Ljava/lang/Long;"); } else if (cls == byte.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Byte", "valueOf", "(B)Ljava/lang/Byte;"); } else if (cls == char.class) { mv.visitMethodInsn(INVOKESTATIC, "java/lang/Character", "valueOf", "(C)Ljava/lang/Character;"); } } public void buildInputs() { if (inputs == 0) return; debug("\n{SETTING UP MEMBERS...}\n"); StringAppender constSig = new StringAppender("("); int size = inputs; for (int i = 0; i < size; i++) { debug("ACC_PRIVATE p" + i); FieldVisitor fv = cw.visitField(ACC_PRIVATE, "p" + i, "Lorg/mvel/optimizers/ExecutableStatement;", null, null); fv.visitEnd(); constSig.append("Lorg/mvel/optimizers/ExecutableStatement;"); } constSig.append(")V"); debug("\n{CREATING INJECTION CONSTRUCTOR}\n"); MethodVisitor cv = cw.visitMethod(ACC_PUBLIC, "<init>", constSig.toString(), null, null); cv.visitCode(); debug("ALOAD 0"); cv.visitVarInsn(ALOAD, 0); debug("INVOKESPECIAL java/lang/Object.<init>"); cv.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V"); for (int i = 0; i < size; i++) { debug("ALOAD 0"); cv.visitVarInsn(ALOAD, 0); debug("ALOAD " + (i + 1)); cv.visitVarInsn(ALOAD, i + 1); debug("PUTFIELD p" + i); cv.visitFieldInsn(PUTFIELD, className, "p" + i, 
"Lorg/mvel/optimizers/ExecutableStatement;"); } debug("RETURN"); cv.visitInsn(RETURN); cv.visitMaxs(0, 0); cv.visitEnd(); debug("}"); } }
JIT Integrated
src/main/java/org/mvel/optimizers/impl/asm/ASMAccessorCompiler.java
JIT Integrated
Java
apache-2.0
ab0164e9452369e0a1d3e216f32ddcb320f269f5
0
huitseeker/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j,kinbod/deeplearning4j,kinbod/deeplearning4j,kinbod/deeplearning4j
package org.deeplearning4j.spark.iterator; import org.apache.spark.input.PortableDataStream; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.api.DataSetPreProcessor; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import java.io.IOException; import java.io.InputStream; import java.util.Collection; import java.util.Iterator; import java.util.List; /** * Created by huitseeker on 2/15/17. */ public abstract class BaseDataSetIterator<T> implements DataSetIterator { protected Collection<T> dataSetStreams; protected DataSetPreProcessor preprocessor; protected Iterator<T> iter; protected int totalOutcomes = -1; protected int inputColumns = -1; protected int batch = -1; protected DataSet preloadedDataSet; protected int cursor = 0; @Override public DataSet next(int num) { return next(); } @Override public abstract int totalExamples(); @Override public int inputColumns() { if(inputColumns == -1) preloadDataSet(); return inputColumns; } @Override public int totalOutcomes() { if(totalOutcomes == -1) preloadDataSet(); return totalExamples(); } @Override public boolean resetSupported(){ return dataSetStreams != null; } @Override public boolean asyncSupported() { return true; } @Override public void reset() { if(dataSetStreams == null) throw new IllegalStateException("Cannot reset iterator constructed with an iterator"); iter = dataSetStreams.iterator(); cursor = 0; } @Override public int batch() { if(batch == -1) preloadDataSet(); return batch; } @Override public int cursor() { return cursor; } @Override public int numExamples() { return 0; } @Override public void setPreProcessor(DataSetPreProcessor preProcessor) { this.preprocessor = preProcessor; } @Override public DataSetPreProcessor getPreProcessor() { return this.preprocessor; } @Override public List<String> getLabels() { return null; } @Override public boolean hasNext() { return iter.hasNext(); } @Override public void remove() { throw new UnsupportedOperationException(); } private void 
preloadDataSet(){ preloadedDataSet = load(iter.next()); totalOutcomes = preloadedDataSet.getLabels().size(1); inputColumns = preloadedDataSet.getFeatureMatrix().size(1); } protected abstract DataSet load(T ds); }
deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/BaseDataSetIterator.java
package org.deeplearning4j.spark.iterator; import org.apache.spark.input.PortableDataStream; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.api.DataSetPreProcessor; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import java.io.IOException; import java.io.InputStream; import java.util.Collection; import java.util.Iterator; import java.util.List; /** * Created by huitseeker on 2/15/17. */ public abstract class BaseDataSetIterator<T> implements DataSetIterator { protected Collection<T> dataSetStreams; protected DataSetPreProcessor preprocessor; protected Iterator<T> iter; protected int totalOutcomes = -1; protected int inputColumns = -1; protected int batch = -1; protected DataSet preloadedDataSet; protected int cursor = 0; @Override public DataSet next(int num) { return next(); } @Override public abstract int totalExamples(); @Override public int inputColumns() { if(inputColumns == -1) preloadDataSet(); return inputColumns; } @Override public int totalOutcomes() { if(totalOutcomes == -1) preloadDataSet(); return totalExamples(); } @Override public boolean resetSupported(){ return dataSetStreams != null; } @Override public boolean asyncSupported() { return true; } @Override public void reset() { if(dataSetStreams == null) throw new IllegalStateException("Cannot reset iterator constructed with an iterator"); iter = dataSetStreams.iterator(); cursor = 0; } @Override public int batch() { if(batch == -1) preloadDataSet(); return batch; } @Override public int cursor() { return cursor; } @Override public int numExamples() { return 0; } @Override public void setPreProcessor(DataSetPreProcessor preProcessor) { this.preprocessor = preProcessor; } @Override public DataSetPreProcessor getPreProcessor() { return this.preprocessor; } @Override public List<String> getLabels() { return null; } @Override public boolean hasNext() { return iter.hasNext(); } @Override public void remove() { throw new UnsupportedOperationException(); } private void 
preloadDataSet(){ preloadedDataSet = load(iter.next()); totalOutcomes = preloadedDataSet.getLabels().size(1); inputColumns = preloadedDataSet.getFeatureMatrix().size(1); batch = preloadedDataSet.numExamples(); } protected abstract DataSet load(T ds); }
Don't assume batch dim in preloading a BaseDataSet See https://github.com/deeplearning4j/deeplearning4j/pull/2843#discussion_r102342951 Former-commit-id: c75ea610ae2e2760da4a83dfdd52cf3d9b3f72bb
deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/BaseDataSetIterator.java
Don't assume batch dim in preloading a BaseDataSet
Java
apache-2.0
9346afc1b690fc5bfa4741c58fca203438e12099
0
digital-dreamer/lighthouse,mikelangley/lighthouse,vinumeris/lighthouse,vinumeris/lighthouse,digital-dreamer/lighthouse,vinumeris/lighthouse,mikelangley/lighthouse,mikelangley/lighthouse,mikelangley/lighthouse,vinumeris/lighthouse
package lighthouse.subwindows; import com.google.common.util.concurrent.*; import com.vinumeris.crashfx.*; import javafx.application.*; import javafx.event.*; import javafx.fxml.*; import javafx.scene.control.*; import lighthouse.*; import lighthouse.protocol.*; import lighthouse.wallet.*; import org.bitcoinj.core.*; import org.slf4j.*; import org.spongycastle.crypto.params.*; import javax.annotation.*; import java.util.*; import static com.google.common.base.Preconditions.*; import static lighthouse.utils.GuiUtils.*; /** * Tells the user there's a fee to pay and shows a progress bar that tracks network propagation. Possibly request the * users password first. This is used for both revocation and claiming the contract. */ public class RevokeAndClaimWindow { private static final Logger log = LoggerFactory.getLogger(RevokeAndClaimWindow.class); // Either this ... public LHProtos.Pledge pledgeToRevoke; // Or these ... public Project projectToClaim; public Set<LHProtos.Pledge> pledgesToClaim; // ... are set. pledgesToClaim is IGNORED in server assisted projects however because one of the pledges might have // been revoked whilst the user was sitting on the claim screen. So we always refetch the status and recheck just // before doing the claim. 
public Runnable onSuccess; public Main.OverlayUI overlayUI; public Button cancelBtn; public Button confirmBtn; public Label explanationLabel; @FXML ProgressBar progressBar; public static Main.OverlayUI<RevokeAndClaimWindow> openForRevoke(LHProtos.Pledge pledgeToRevoke) { Main.OverlayUI<RevokeAndClaimWindow> overlay = Main.instance.overlayUI("subwindows/revoke_and_claim.fxml", "Revoke pledge"); overlay.controller.setForRevoke(pledgeToRevoke); return overlay; } public static Main.OverlayUI<RevokeAndClaimWindow> openForClaim(Project project, Set<LHProtos.Pledge> pledgesToClaim) { Main.OverlayUI<RevokeAndClaimWindow> overlay = Main.instance.overlayUI("subwindows/revoke_and_claim.fxml", "Claim pledges"); overlay.controller.setForClaim(project, pledgesToClaim); return overlay; } private void setForClaim(Project project, Set<LHProtos.Pledge> claim) { projectToClaim = project; pledgesToClaim = claim; explanationLabel.setText("Claiming a project sends all the pledged money to the project's goal address. " + explanationLabel.getText()); } private void setForRevoke(LHProtos.Pledge revoke) { pledgeToRevoke = revoke; explanationLabel.setText("Revoking a pledge returns the money to your wallet. 
" + explanationLabel.getText()); } @FXML public void confirmClicked(ActionEvent event) { // runLater: shitty hack around RT-37821 (consider upgrading to 8u40 when available and/or applying fix locally) // otherwise pressing enter can cause a crash here when we open a new window with a default button Platform.runLater(this::confirmClicked); } private void confirmClicked() { if (Main.wallet.isEncrypted()) { log.info("Wallet is encrypted, requesting password"); WalletPasswordController.requestPasswordWithNextWindow(key -> { Main.OverlayUI<RevokeAndClaimWindow> screen; if (projectToClaim != null) { screen = openForClaim(projectToClaim, pledgesToClaim); } else { screen = openForRevoke(pledgeToRevoke); } screen.controller.onSuccess = onSuccess; screen.controller.go(key); }); } else { go(null); } } private void go(@Nullable KeyParameter aesKey) { confirmBtn.setDisable(true); cancelBtn.setDisable(true); if (pledgeToRevoke != null) { revoke(aesKey); } else { checkState(projectToClaim != null); claim(aesKey); } } private void claim(@Nullable KeyParameter key) { if (projectToClaim.getPaymentURL() != null) { claimServerAssisted(key); } else { log.info("Attempting to claim serverless project, proceeding to merge and broadcast"); broadcastClaim(pledgesToClaim, key); } } private void claimServerAssisted(@Nullable KeyParameter key) { // Need to refresh here because we're polling the server and might be lagging behind the true state. // This is kind of a hack. Better solutions would be: // // 1) Check the pledges returned ourselves against p2p network using getutxo: lowers trust in the server // 2) Have server stream updates to client so we are never more than a second or two behind, instead of // a block interval like today. log.info("Attempting to claim server assisted project, refreshing"); progressBar.setProgress(-1); Main.backend.refreshProjectStatusFromServer(projectToClaim, key).handleAsync((status, ex) -> { // On backend thread. 
if (ex != null) { log.error("Unable to fetch project status", ex); informationalAlert("Unable to claim", "Could not fetch project status from server: %s", ex); overlayUI.done(); } else { HashSet<LHProtos.Pledge> newPledges = new HashSet<>(status.getPledgesList()); if (status.getValuePledgedSoFar() < projectToClaim.getGoalAmount().value) { log.error("Refreshed project status indicates value has changed, is now {}", status.getValuePledgedSoFar()); informationalAlert("Unable to claim", "One or more pledges have been revoked whilst you were waiting."); overlayUI.done(); } else { // Must use newPledges here because a pledge might have been revoked and replaced in the // waiting interval. broadcastClaim(newPledges, key); } } return null; }, Platform::runLater); } private void broadcastClaim(Set<LHProtos.Pledge> pledges, @Nullable KeyParameter key) { try { PledgingWallet.CompletionProgress progress = Main.wallet.completeContractWithFee(projectToClaim, pledges, key); double total = Main.bitcoin.peerGroup().getMinBroadcastConnections() * 2; // two transactions. progress.peersSeen = seen -> { if (seen == -1) { Platform.runLater(onSuccess::run); } else { progressBar.setProgress(seen / total); } }; progress.txFuture.handleAsync((t, ex) -> { if (ex != null) { informationalAlert("Transaction acceptance issue", "At least one peer reported a problem with the transaction: %s", ex); } else { onSuccess.run(); } overlayUI.done(); return null; }, Platform::runLater); } catch (Ex.ValueMismatch e) { // TODO: Solve value mismatch errors. We have a few options. // 1) Try taking away pledges to see if we can get precisely to the target value, e.g. this can // help if everyone agrees up front to pledge 1 BTC exactly, and the goal is 10, but nobody // knows how many people will pledge so we might end up with 11 or 12 BTC. In this situation // we can just randomly drop pledges until we get to the right amount (or allow the user to choose). 
// 2) Find a way to extend the Bitcoin protocol so the additional money can be allocated to the // project owner and not miners. For instance by allowing new SIGHASH modes that control which // parts of which outputs are signed. This would require a Script 2.0 effort though. // // This should never happen in server assisted mode. log.error("Value mismatch: " + e); informationalAlert("Too much money", "You have gathered pledges that add up to more than the goal. The excess cannot be " + "redeemed in the current version of the software and would end up being paid completely " + "to miners fees. Please remove some pledges and try to hit the goal amount exactly. " + "There is %s too much.", Coin.valueOf(e.byAmount).toFriendlyString()); overlayUI.done(); } catch (InsufficientMoneyException e) { log.error("Insufficient money to claim", e); informationalAlert("Cannot claim pledges", "Closing the contract requires paying Bitcoin network fees, but you don't have enough " + "money in the wallet. Add more money and try again." ); overlayUI.done(); } } private void revoke(@Nullable KeyParameter aesKey) { try { PledgingWallet.Revocation revocation = Main.wallet.revokePledge(pledgeToRevoke, aesKey); revocation.tx.getConfidence().addEventListener((conf, reason) -> { progressBar.setProgress(conf.numBroadcastPeers() / (double) Main.bitcoin.peerGroup().getMinBroadcastConnections()); }, Platform::runLater); Futures.addCallback(revocation.broadcastFuture, new FutureCallback<Transaction>() { @Override public void onSuccess(@Nullable Transaction result) { onSuccess.run(); overlayUI.done(); } @Override public void onFailure(Throwable t) { CrashWindow.open(t); overlayUI.done(); } }, Platform::runLater); } catch (InsufficientMoneyException e) { // This really sucks. In future we should make it a free tx, when we know if we have sufficient // priority to meet the relay rules. 
log.error("Could not revoke due to insufficient money to pay the fee", e); informationalAlert("Cannot revoke pledge", "Revoking a pledge requires making another Bitcoin transaction on the block chain, but " + "you don't have sufficient funds to pay the required fee. Add more money and try again." ); } } @FXML public void cancelClicked(ActionEvent event) { overlayUI.done(); } }
client/src/main/java/lighthouse/subwindows/RevokeAndClaimWindow.java
package lighthouse.subwindows; import com.google.common.util.concurrent.*; import com.vinumeris.crashfx.*; import javafx.application.*; import javafx.event.*; import javafx.fxml.*; import javafx.scene.control.*; import lighthouse.*; import lighthouse.protocol.*; import lighthouse.wallet.*; import org.bitcoinj.core.*; import org.slf4j.*; import org.spongycastle.crypto.params.*; import javax.annotation.*; import java.util.*; import static com.google.common.base.Preconditions.*; import static lighthouse.utils.GuiUtils.*; /** * Tells the user there's a fee to pay and shows a progress bar that tracks network propagation. Possibly request the * users password first. This is used for both revocation and claiming the contract. */ public class RevokeAndClaimWindow { private static final Logger log = LoggerFactory.getLogger(RevokeAndClaimWindow.class); // Either this ... public LHProtos.Pledge pledgeToRevoke; // Or these ... public Project projectToClaim; public Set<LHProtos.Pledge> pledgesToClaim; // ... are set. pledgesToClaim is IGNORED in server assisted projects however because one of the pledges might have // been revoked whilst the user was sitting on the claim screen. So we always refetch the status and recheck just // before doing the claim. 
public Runnable onSuccess; public Main.OverlayUI overlayUI; public Button cancelBtn; public Button confirmBtn; public Label explanationLabel; @FXML ProgressBar progressBar; public static Main.OverlayUI<RevokeAndClaimWindow> openForRevoke(LHProtos.Pledge pledgeToRevoke) { Main.OverlayUI<RevokeAndClaimWindow> overlay = Main.instance.overlayUI("subwindows/revoke_and_claim.fxml", "Revoke pledge"); overlay.controller.setForRevoke(pledgeToRevoke); return overlay; } public static Main.OverlayUI<RevokeAndClaimWindow> openForClaim(Project project, Set<LHProtos.Pledge> pledgesToClaim) { Main.OverlayUI<RevokeAndClaimWindow> overlay = Main.instance.overlayUI("subwindows/revoke_and_claim.fxml", "Claim pledges"); overlay.controller.setForClaim(project, pledgesToClaim); return overlay; } private void setForClaim(Project project, Set<LHProtos.Pledge> claim) { projectToClaim = project; pledgesToClaim = claim; explanationLabel.setText("Claiming a project sends all the pledged money to the project's goal address. " + explanationLabel.getText()); } private void setForRevoke(LHProtos.Pledge revoke) { pledgeToRevoke = revoke; explanationLabel.setText("Revoking a pledge returns the money to your wallet. 
" + explanationLabel.getText()); } @FXML public void confirmClicked(ActionEvent event) { // runLater: shitty hack around RT-37821 (consider upgrading to 8u40 when available and/or applying fix locally) // otherwise pressing enter can cause a crash here when we open a new window with a default button Platform.runLater(this::confirmClicked); } private void confirmClicked() { if (Main.wallet.isEncrypted()) { log.info("Wallet is encrypted, requesting password"); WalletPasswordController.requestPasswordWithNextWindow(key -> { Main.OverlayUI<RevokeAndClaimWindow> screen = Main.instance.overlayUI("subwindows/revoke_and_claim.fxml", "Revoke pledge"); screen.controller.pledgeToRevoke = pledgeToRevoke; screen.controller.projectToClaim = projectToClaim; screen.controller.pledgesToClaim = pledgesToClaim; screen.controller.onSuccess = onSuccess; screen.controller.go(key); }); } else { go(null); } } private void go(@Nullable KeyParameter aesKey) { confirmBtn.setDisable(true); cancelBtn.setDisable(true); if (pledgeToRevoke != null) { revoke(aesKey); } else { checkState(projectToClaim != null); claim(aesKey); } } private void claim(@Nullable KeyParameter key) { if (projectToClaim.getPaymentURL() != null) { claimServerAssisted(key); } else { log.info("Attempting to claim serverless project, proceeding to merge and broadcast"); broadcastClaim(pledgesToClaim, key); } } private void claimServerAssisted(@Nullable KeyParameter key) { // Need to refresh here because we're polling the server and might be lagging behind the true state. // This is kind of a hack. Better solutions would be: // // 1) Check the pledges returned ourselves against p2p network using getutxo: lowers trust in the server // 2) Have server stream updates to client so we are never more than a second or two behind, instead of // a block interval like today. 
log.info("Attempting to claim server assisted project, refreshing"); progressBar.setProgress(-1); Main.backend.refreshProjectStatusFromServer(projectToClaim, key).handleAsync((status, ex) -> { // On backend thread. if (ex != null) { log.error("Unable to fetch project status", ex); informationalAlert("Unable to claim", "Could not fetch project status from server: %s", ex); overlayUI.done(); } else { HashSet<LHProtos.Pledge> newPledges = new HashSet<>(status.getPledgesList()); if (status.getValuePledgedSoFar() < projectToClaim.getGoalAmount().value) { log.error("Refreshed project status indicates value has changed, is now {}", status.getValuePledgedSoFar()); informationalAlert("Unable to claim", "One or more pledges have been revoked whilst you were waiting."); overlayUI.done(); } else { // Must use newPledges here because a pledge might have been revoked and replaced in the // waiting interval. broadcastClaim(newPledges, key); } } return null; }, Platform::runLater); } private void broadcastClaim(Set<LHProtos.Pledge> pledges, @Nullable KeyParameter key) { try { PledgingWallet.CompletionProgress progress = Main.wallet.completeContractWithFee(projectToClaim, pledges, key); double total = Main.bitcoin.peerGroup().getMinBroadcastConnections() * 2; // two transactions. progress.peersSeen = seen -> { if (seen == -1) { Platform.runLater(onSuccess::run); } else { progressBar.setProgress(seen / total); } }; progress.txFuture.handleAsync((t, ex) -> { if (ex != null) { informationalAlert("Transaction acceptance issue", "At least one peer reported a problem with the transaction: %s", ex); } else { onSuccess.run(); } overlayUI.done(); return null; }, Platform::runLater); } catch (Ex.ValueMismatch e) { // TODO: Solve value mismatch errors. We have a few options. // 1) Try taking away pledges to see if we can get precisely to the target value, e.g. 
this can // help if everyone agrees up front to pledge 1 BTC exactly, and the goal is 10, but nobody // knows how many people will pledge so we might end up with 11 or 12 BTC. In this situation // we can just randomly drop pledges until we get to the right amount (or allow the user to choose). // 2) Find a way to extend the Bitcoin protocol so the additional money can be allocated to the // project owner and not miners. For instance by allowing new SIGHASH modes that control which // parts of which outputs are signed. This would require a Script 2.0 effort though. // // This should never happen in server assisted mode. log.error("Value mismatch: " + e); informationalAlert("Too much money", "You have gathered pledges that add up to more than the goal. The excess cannot be " + "redeemed in the current version of the software and would end up being paid completely " + "to miners fees. Please remove some pledges and try to hit the goal amount exactly. " + "There is %s too much.", Coin.valueOf(e.byAmount).toFriendlyString()); overlayUI.done(); } catch (InsufficientMoneyException e) { log.error("Insufficient money to claim", e); informationalAlert("Cannot claim pledges", "Closing the contract requires paying Bitcoin network fees, but you don't have enough " + "money in the wallet. Add more money and try again." 
); overlayUI.done(); } } private void revoke(@Nullable KeyParameter aesKey) { try { PledgingWallet.Revocation revocation = Main.wallet.revokePledge(pledgeToRevoke, aesKey); revocation.tx.getConfidence().addEventListener((conf, reason) -> { progressBar.setProgress(conf.numBroadcastPeers() / (double) Main.bitcoin.peerGroup().getMinBroadcastConnections()); }, Platform::runLater); Futures.addCallback(revocation.broadcastFuture, new FutureCallback<Transaction>() { @Override public void onSuccess(@Nullable Transaction result) { onSuccess.run(); overlayUI.done(); } @Override public void onFailure(Throwable t) { CrashWindow.open(t); overlayUI.done(); } }, Platform::runLater); } catch (InsufficientMoneyException e) { // This really sucks. In future we should make it a free tx, when we know if we have sufficient // priority to meet the relay rules. log.error("Could not revoke due to insufficient money to pay the fee", e); informationalAlert("Cannot revoke pledge", "Revoking a pledge requires making another Bitcoin transaction on the block chain, but " + "you don't have sufficient funds to pay the required fee. Add more money and try again." ); } } @FXML public void cancelClicked(ActionEvent event) { overlayUI.done(); } }
Fix label on the claim screen when we come back after requesting the password.
client/src/main/java/lighthouse/subwindows/RevokeAndClaimWindow.java
Fix label on the claim screen when we come back after requesting the password.
Java
apache-2.0
5a6fe75d6c8142613cf210f9702c491e2162c8a6
0
smgoller/geode,smgoller/geode,masaki-yamakawa/geode,jdeppe-pivotal/geode,masaki-yamakawa/geode,jdeppe-pivotal/geode,smgoller/geode,masaki-yamakawa/geode,smgoller/geode,jdeppe-pivotal/geode,jdeppe-pivotal/geode,smgoller/geode,jdeppe-pivotal/geode,masaki-yamakawa/geode,smgoller/geode,masaki-yamakawa/geode,jdeppe-pivotal/geode,masaki-yamakawa/geode,masaki-yamakawa/geode,jdeppe-pivotal/geode,smgoller/geode
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management.internal.cli.commands; import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_CLUSTER_CONFIGURATION; import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_HOSTNAME_FOR_CLIENTS; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Properties; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.apache.geode.management.internal.cli.shell.Gfsh; import org.apache.geode.management.internal.cli.util.CommandStringBuilder; import org.apache.geode.test.junit.rules.GfshParserRule; public class StartLocatorCommandIntegrationTest { private static final String FAKE_HOSTNAME = "someFakeHostname"; @Rule public GfshParserRule commandRule = new GfshParserRule(); private StartLocatorCommand spy; @Before public void before() throws 
IOException { final Process process = mock(Process.class); when(process.getInputStream()).thenReturn(mock(InputStream.class)); when(process.getErrorStream()).thenReturn(mock(InputStream.class)); when(process.getOutputStream()).thenReturn(mock(OutputStream.class)); spy = Mockito.spy(StartLocatorCommand.class); doReturn(process).when(spy).getProcess(any(), any()); doReturn(mock(Gfsh.class)).when(spy).getGfsh(); } @Test public void startLocatorWorksWithNoOptions() throws Exception { commandRule.executeAndAssertThat(spy, "start locator"); ArgumentCaptor<Properties> gemfirePropertiesCaptor = ArgumentCaptor.forClass(Properties.class); verify(spy).createStartLocatorCommandLine(any(), any(), any(), gemfirePropertiesCaptor.capture(), any(), any(), any(), any(), any()); Properties gemfireProperties = gemfirePropertiesCaptor.getValue(); assertThat(gemfireProperties).containsKey(ENABLE_CLUSTER_CONFIGURATION); assertThat(gemfireProperties.get(ENABLE_CLUSTER_CONFIGURATION)).isEqualTo("true"); } @Test public void startLocatorRespectsJmxManagerHostnameForClients() throws Exception { String startLocatorCommand = new CommandStringBuilder("start locator") .addOption(JMX_MANAGER_HOSTNAME_FOR_CLIENTS, FAKE_HOSTNAME).toString(); commandRule.executeAndAssertThat(spy, startLocatorCommand); ArgumentCaptor<Properties> gemfirePropertiesCaptor = ArgumentCaptor.forClass(Properties.class); verify(spy).createStartLocatorCommandLine(any(), any(), any(), gemfirePropertiesCaptor.capture(), any(), any(), any(), any(), any()); Properties gemfireProperties = gemfirePropertiesCaptor.getValue(); assertThat(gemfireProperties).containsKey(JMX_MANAGER_HOSTNAME_FOR_CLIENTS); assertThat(gemfireProperties.get(JMX_MANAGER_HOSTNAME_FOR_CLIENTS)).isEqualTo(FAKE_HOSTNAME); } @Test public void startWithBindAddress() throws Exception { commandRule.executeAndAssertThat(spy, "start locator --bind-address=127.0.0.1"); ArgumentCaptor<String[]> commandLines = ArgumentCaptor.forClass(String[].class); 
verify(spy).getProcess(any(), commandLines.capture()); String[] lines = commandLines.getValue(); assertThat(lines[12]).isEqualTo("--bind-address=127.0.0.1"); } @Test public void startLocatorRespectsHostnameForClients() throws Exception { String startLocatorCommand = new CommandStringBuilder("start locator") .addOption("hostname-for-clients", FAKE_HOSTNAME).toString(); commandRule.executeAndAssertThat(spy, startLocatorCommand); ArgumentCaptor<String[]> commandLines = ArgumentCaptor.forClass(String[].class); verify(spy).getProcess(any(), commandLines.capture()); String[] lines = commandLines.getValue(); assertThat(lines).containsOnlyOnce("--hostname-for-clients=" + FAKE_HOSTNAME); } }
geode-assembly/src/integrationTest/java/org/apache/geode/management/internal/cli/commands/StartLocatorCommandIntegrationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management.internal.cli.commands; import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_CLUSTER_CONFIGURATION; import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_HOSTNAME_FOR_CLIENTS; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import java.io.InputStream; import java.io.OutputStream; import java.util.Properties; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.apache.geode.management.internal.cli.shell.Gfsh; import org.apache.geode.management.internal.cli.util.CommandStringBuilder; import org.apache.geode.test.junit.rules.GfshParserRule; public class StartLocatorCommandIntegrationTest { private static final String FAKE_HOSTNAME = "someFakeHostname"; @Rule public GfshParserRule commandRule = new GfshParserRule(); private StartLocatorCommand spy; @Before public void before() { spy = Mockito.spy(StartLocatorCommand.class); 
doReturn(mock(Gfsh.class)).when(spy).getGfsh(); } @Test public void startLocatorWorksWithNoOptions() throws Exception { commandRule.executeAndAssertThat(spy, "start locator"); ArgumentCaptor<Properties> gemfirePropertiesCaptor = ArgumentCaptor.forClass(Properties.class); verify(spy).createStartLocatorCommandLine(any(), any(), any(), gemfirePropertiesCaptor.capture(), any(), any(), any(), any(), any()); Properties gemfireProperties = gemfirePropertiesCaptor.getValue(); assertThat(gemfireProperties).containsKey(ENABLE_CLUSTER_CONFIGURATION); assertThat(gemfireProperties.get(ENABLE_CLUSTER_CONFIGURATION)).isEqualTo("true"); } @Test public void startLocatorRespectsJmxManagerHostnameForClients() throws Exception { String startLocatorCommand = new CommandStringBuilder("start locator") .addOption(JMX_MANAGER_HOSTNAME_FOR_CLIENTS, FAKE_HOSTNAME).toString(); commandRule.executeAndAssertThat(spy, startLocatorCommand); ArgumentCaptor<Properties> gemfirePropertiesCaptor = ArgumentCaptor.forClass(Properties.class); verify(spy).createStartLocatorCommandLine(any(), any(), any(), gemfirePropertiesCaptor.capture(), any(), any(), any(), any(), any()); Properties gemfireProperties = gemfirePropertiesCaptor.getValue(); assertThat(gemfireProperties).containsKey(JMX_MANAGER_HOSTNAME_FOR_CLIENTS); assertThat(gemfireProperties.get(JMX_MANAGER_HOSTNAME_FOR_CLIENTS)).isEqualTo(FAKE_HOSTNAME); } @Test public void startWithBindAddress() throws Exception { final Process mockProcess = mock(Process.class); doReturn(mock(InputStream.class)).when(mockProcess).getInputStream(); doReturn(mock(InputStream.class)).when(mockProcess).getErrorStream(); doReturn(mock(OutputStream.class)).when(mockProcess).getOutputStream(); doReturn(mockProcess).when(spy).getProcess(any(), any()); commandRule.executeAndAssertThat(spy, "start locator --bind-address=127.0.0.1"); ArgumentCaptor<String[]> commandLines = ArgumentCaptor.forClass(String[].class); verify(spy).getProcess(any(), commandLines.capture()); String[] 
lines = commandLines.getValue(); assertThat(lines[12]).isEqualTo("--bind-address=127.0.0.1"); } @Test public void startLocatorRespectsHostnameForClients() throws Exception { doReturn(mock(Process.class)).when(spy).getProcess(any(), any()); String startLocatorCommand = new CommandStringBuilder("start locator") .addOption("hostname-for-clients", FAKE_HOSTNAME).toString(); commandRule.executeAndAssertThat(spy, startLocatorCommand); ArgumentCaptor<String[]> commandLines = ArgumentCaptor.forClass(String[].class); verify(spy).getProcess(any(), commandLines.capture()); String[] lines = commandLines.getValue(); assertThat(lines).containsOnlyOnce("--hostname-for-clients=" + FAKE_HOSTNAME); } }
GEODE-9483: Make StartLocatorCommandIntegrationTest not start unneeded locator (#6731) Two of the tests in `StartLocatorCommandIntegrationTest` launch a locator process that binds to the default HTTP service port (7070). This can interfere with the tests that explicitly verify that the system uses the correct HTTP service port by default. Neither test actually needs the launched locator for any purpose. Neither test makes a single assertion about the locator. When the tests exit, they leave the locator running. In fact, the locator typically finishes launching well after the tests have all exited. Rather than fix the port assignment, this commit changes the tests so that they do not actually launch a locator. Two of the tests in this class already do this.
geode-assembly/src/integrationTest/java/org/apache/geode/management/internal/cli/commands/StartLocatorCommandIntegrationTest.java
GEODE-9483: Make StartLocatorCommandIntegrationTest not start unneeded locator (#6731)
Java
apache-2.0
d7672f75dfe37df2e7088b434564d58be20d037b
0
consulo/consulo-napile,consulo/consulo-napile
/* * Copyright 2010-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.napile.idea.plugin.codeInsight; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.mustbe.consulo.RequiredReadAction; import org.napile.compiler.lang.descriptors.CallableDescriptor; import org.napile.compiler.lang.descriptors.ClassifierDescriptor; import org.napile.compiler.lang.descriptors.DeclarationDescriptor; import org.napile.compiler.lang.resolve.BindingTraceKeys; import org.napile.compiler.lang.resolve.BindingTraceUtil; import org.napile.compiler.lang.resolve.BindingTrace; import org.napile.compiler.lang.types.NapileType; import org.napile.compiler.lang.psi.NapileElement; import org.napile.compiler.lang.psi.NapileFile; import org.napile.idea.plugin.module.ModuleAnalyzerUtil; import com.intellij.codeInsight.navigation.actions.TypeDeclarationProvider; import com.intellij.openapi.editor.Editor; import com.intellij.psi.PsiElement; /** * @author Evgeny Gerashchenko * @since 07.05.12 */ public class NapileTypeDeclarationProvider extends TypeDeclarationProvider { @RequiredReadAction @Nullable @Override public PsiElement[] getSymbolTypeDeclarations(@NotNull PsiElement symbol, @Nullable Editor editor, int offset) { if(symbol instanceof NapileElement && symbol.getContainingFile() instanceof NapileFile) { BindingTrace bindingContext = ModuleAnalyzerUtil.lastAnalyze((NapileFile) symbol.getContainingFile()).getBindingTrace(); 
DeclarationDescriptor descriptor = bindingContext.get(BindingTraceKeys.DECLARATION_TO_DESCRIPTOR, symbol); if(descriptor instanceof CallableDescriptor) { NapileType type = ((CallableDescriptor) descriptor).getReturnType(); if(type != null) { ClassifierDescriptor classifierDescriptor = type.getConstructor().getDeclarationDescriptor(); if(classifierDescriptor != null) { PsiElement typeElement = BindingTraceUtil.descriptorToDeclaration(bindingContext, classifierDescriptor); if(typeElement != null) { return new PsiElement[]{typeElement}; } } } } } return new PsiElement[0]; } }
src/org/napile/idea/plugin/codeInsight/NapileTypeDeclarationProvider.java
/* * Copyright 2010-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.napile.idea.plugin.codeInsight; import org.napile.compiler.lang.descriptors.CallableDescriptor; import org.napile.compiler.lang.descriptors.ClassifierDescriptor; import org.napile.compiler.lang.descriptors.DeclarationDescriptor; import org.napile.compiler.lang.resolve.BindingTraceKeys; import org.napile.compiler.lang.resolve.BindingTraceUtil; import org.napile.compiler.lang.resolve.BindingTrace; import org.napile.compiler.lang.types.NapileType; import org.napile.compiler.lang.psi.NapileElement; import org.napile.compiler.lang.psi.NapileFile; import org.napile.idea.plugin.module.ModuleAnalyzerUtil; import com.intellij.codeInsight.navigation.actions.TypeDeclarationProvider; import com.intellij.psi.PsiElement; /** * @author Evgeny Gerashchenko * @since 07.05.12 */ public class NapileTypeDeclarationProvider implements TypeDeclarationProvider { @Override public PsiElement[] getSymbolTypeDeclarations(PsiElement symbol) { if(symbol instanceof NapileElement && symbol.getContainingFile() instanceof NapileFile) { BindingTrace bindingContext = ModuleAnalyzerUtil.lastAnalyze((NapileFile) symbol.getContainingFile()).getBindingTrace(); DeclarationDescriptor descriptor = bindingContext.get(BindingTraceKeys.DECLARATION_TO_DESCRIPTOR, symbol); if(descriptor instanceof CallableDescriptor) { NapileType type = ((CallableDescriptor) descriptor).getReturnType(); if(type != null) { 
ClassifierDescriptor classifierDescriptor = type.getConstructor().getDeclarationDescriptor(); if(classifierDescriptor != null) { PsiElement typeElement = BindingTraceUtil.descriptorToDeclaration(bindingContext, classifierDescriptor); if(typeElement != null) { return new PsiElement[]{typeElement}; } } } } } return new PsiElement[0]; } }
compilation
src/org/napile/idea/plugin/codeInsight/NapileTypeDeclarationProvider.java
compilation
Java
apache-2.0
99db06db63f7a84aab598d6ad9990bbf41d2effa
0
prabushi/devstudio-tooling-esb,wso2/devstudio-tooling-esb,prabushi/devstudio-tooling-esb,wso2/devstudio-tooling-esb,wso2/devstudio-tooling-esb,prabushi/devstudio-tooling-esb,wso2/devstudio-tooling-esb,prabushi/devstudio-tooling-esb
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.connections; import java.awt.MouseInfo; import java.util.ArrayList; import org.eclipse.draw2d.FigureCanvas; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.geometry.Point; import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderedShapeEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.ShapeNodeEditPart; import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure; import org.eclipse.swt.widgets.Control; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractBaseFigureEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorFlowCompartmentEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorOutputConnectorEditPart.EastPointerFigure; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorOutputConnectorEditPart.WestPointerFigure; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractOutputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EsbLinkEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyServiceEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbMultiPageEditor; /* * This class is used to handle automatic connection creation stuffs. 
* */ public class ConnectionCalculator { private static Point currentFigureLocation = null; private static Point connectorFigureLocation = null; private static final int WIDTH_OF_PROXYCONNECTOR = 82; public static EsbLinkEditPart getNearestLinkEditPart(ArrayList links, AbstractBorderedShapeEditPart childEditPart) { double current = 0.0; ArrayList<EsbLinkEditPart> nearLinks = new ArrayList<EsbLinkEditPart>(); double distance = 0.0; double distanceToUpperLine = 0.0; double distanceToBottomLine = 0.0; EsbLinkEditPart nearestLink = null; EsbMultiPageEditor esbMultiPageEditor = (EsbMultiPageEditor) EditorUtils.getActiveEditor(); double zoom = esbMultiPageEditor.getZoom(); if (childEditPart != null) { updateCurrentStatesForLinks(childEditPart); for (int i = 0; i < links.size(); ++i) { if (!links.get(i).equals(childEditPart)) { int xLeft = ((EsbLinkEditPart) links.get(i)).getFigure() .getBounds().getLeft().x; int xRight = (((EsbLinkEditPart) links.get(i)).getFigure() .getBounds().getRight().x); double actualCurrentPosition = currentFigureLocation.x; if ((xLeft < actualCurrentPosition) && (actualCurrentPosition < xRight)) { nearLinks.add((EsbLinkEditPart) links.get(i)); } } } } for (int q = 0; q < nearLinks.size(); ++q) { if ((((nearLinks.get(q).getFigure().getBounds().getLeft().y) < ((currentFigureLocation.y) + (70*zoom))) && ((currentFigureLocation.y) < (nearLinks .get(q).getFigure().getBounds().getBottomLeft().y))) || (((nearLinks.get(q).getFigure().getBounds().getLeft().y) > (currentFigureLocation.y + (70*zoom))) && ((currentFigureLocation.y) > (nearLinks .get(q).getFigure().getBounds().getBottomLeft().y)))) { return (EsbLinkEditPart) nearLinks.get(q); } distanceToUpperLine = nearLinks.get(q).getFigure().getBounds() .getLeft().y - currentFigureLocation.y; distanceToUpperLine = Math.abs(distanceToUpperLine); distanceToBottomLine = nearLinks.get(q).getFigure().getBounds() .getBottomLeft().y - currentFigureLocation.y; distanceToBottomLine = Math.abs(distanceToBottomLine); 
if (distanceToUpperLine > distanceToBottomLine) { distance = distanceToBottomLine; } else { distance = distanceToUpperLine; } if (current == 0.0) { current = distance; nearestLink = (EsbLinkEditPart) nearLinks.get(q); } else if (distance < current) { current = distance; nearestLink = (EsbLinkEditPart) nearLinks.get(q); } } if (current > (60*zoom)) { return null; } return nearestLink; } public static ShapeNodeEditPart getNearestConnectorEditPart( ArrayList<AbstractConnectorEditPart> connectors, ShapeNodeEditPart childEditPart) { AbstractConnectorEditPart nearForwardConnector = null; AbstractConnectorEditPart nearReverseConnector = null; AbstractConnectorEditPart nearConnector = null; AbstractConnectorEditPart currentConnector = null; int yCurrent = 0, yDistance1 = 0, yDistance2 = 0; double EastDistance = 0, EastCurrent = 0, WestCurrent = 0, WestDistance = 0; EsbMultiPageEditor esbMultiPageEditor = (EsbMultiPageEditor) EditorUtils.getActiveEditor(); double zoom = esbMultiPageEditor.getZoom(); if (childEditPart != null) { if ((connectors.size() != 0)) { if (connectors.get(0) instanceof AbstractInputConnectorEditPart) { currentConnector = EditorUtils .getInputConnector(childEditPart); } else { currentConnector = EditorUtils .getOutputConnector(childEditPart); } } /* * Drop mediator doesn't have an Output connector. 
*/ if(childEditPart instanceof DropMediatorEditPart){ currentConnector = EditorUtils .getInputConnector(childEditPart); } for (int i = 0; i < connectors.size(); ++i) { IFigure figure = (IFigure) ((DefaultSizeNodeFigure) connectors .get(i).getFigure()).getChildren().get(0); // Skip the Additional output connectors as they are taken into account at the latter part of the code if((figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector.WestPointerFigure)) { continue; } if ((currentConnector!=null)&&(!connectors.get(i).equals(currentConnector)) && (!connectors.get(i).getParent() .equals(currentConnector.getParent()))) { updateCurrentStatesForConnectors(currentConnector); updateCurrentStatesForGivenFigure(connectors.get(i)); double xLeft=connectorFigureLocation.x; double actualCurrentPosition = currentFigureLocation.x; if ((figure instanceof EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorInputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesOutputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesInputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector.EastPointerFigure)) { if((figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutputConnectorEditPart.EastPointerFigure) ||(figure instanceof 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutputConnectorEditPart.EastPointerFigure)){ xLeft=xLeft-WIDTH_OF_PROXYCONNECTOR; } EastDistance = Math.abs(xLeft - actualCurrentPosition); if (((connectors.get(i) instanceof AbstractOutputConnectorEditPart) && (xLeft < actualCurrentPosition)) || ((connectors.get(i) instanceof AbstractInputConnectorEditPart) && (xLeft > actualCurrentPosition))) { if ((EastCurrent == 0) || (EastCurrent > EastDistance)) { EastCurrent = EastDistance; nearForwardConnector = connectors.get(i); } } } else if ((figure instanceof WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractEndpointOutputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyFaultInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutSequenceOutputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutSequenceOutputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector.WestPointerFigure)) { if((figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutSequenceOutputConnectorEditPart.WestPointerFigure) ||(figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutSequenceOutputConnectorEditPart.WestPointerFigure)){ 
xLeft=xLeft-WIDTH_OF_PROXYCONNECTOR; } WestDistance = Math.abs(xLeft - actualCurrentPosition); if (((connectors.get(i) instanceof AbstractOutputConnectorEditPart) && (xLeft > actualCurrentPosition)) || ((connectors.get(i) instanceof AbstractInputConnectorEditPart) && (xLeft < actualCurrentPosition))) { if ((WestCurrent == 0) || (WestCurrent > WestDistance)) { WestCurrent = WestDistance; nearReverseConnector = connectors.get(i); } } } } } } if (nearForwardConnector != null) { updateCurrentStatesForGivenFigure(nearForwardConnector); if((nearForwardConnector.getParent() instanceof ProxyServiceEditPart) ||(nearForwardConnector.getParent() instanceof APIResourceEditPart)){ int parentY=((AbstractBaseFigureEditPart)nearForwardConnector.getParent()).getLocation().y; yDistance1 = Math .abs(connectorFigureLocation.y-parentY - currentFigureLocation.y); }else{ yDistance1 = Math .abs(connectorFigureLocation.y - currentFigureLocation.y); } } if (nearReverseConnector != null) { updateCurrentStatesForGivenFigure(nearReverseConnector); if((nearReverseConnector.getParent() instanceof ProxyServiceEditPart) ||(nearReverseConnector.getParent() instanceof APIResourceEditPart)){ int parentY=((AbstractBaseFigureEditPart)nearReverseConnector.getParent()).getLocation().y; yDistance2 = Math .abs(connectorFigureLocation.y-parentY - currentFigureLocation.y); }else{ yDistance2 = Math .abs(connectorFigureLocation.y - currentFigureLocation.y); } } if ((yDistance1 != 0) && ((yDistance2 == 0) || (yDistance1 < yDistance2))) { yCurrent = yDistance1; nearConnector = nearForwardConnector; } else { yCurrent = yDistance2; nearConnector = nearReverseConnector; } if (yCurrent > 60*zoom) { nearConnector = null; } /* * If 'nearConnector' is null we have to check again whether dropped * mediator is the first element of a compartment of a complex mediator. * If it is true we have to use following logic to get nearest * connection. 
*/ if (nearConnector == null) { if (childEditPart.getParent() instanceof AbstractMediatorFlowCompartmentEditPart) { int compartmentCenter_y = ((AbstractMediatorFlowCompartmentEditPart) childEditPart .getParent()).getFigure().getBounds().getCenter().y; if (EditorUtils.getMediator(childEditPart.getParent()) != null) { ArrayList<AdditionalOutputConnector> additionalConnectors = EditorUtils .getMediatorAdditionalOutputConnectors(EditorUtils .getMediator(childEditPart.getParent())); AdditionalOutputConnector temp = null; int diff_temp = 0; for (AdditionalOutputConnector con : additionalConnectors) { int diff = Math.abs(con.getLocation().y - compartmentCenter_y); if (diff_temp == 0) { temp = con; diff_temp = diff; } else if (diff < diff_temp) { temp = con; diff_temp = diff; } } if (connectors.get(0) instanceof AbstractOutputConnectorEditPart) { nearConnector = temp; } } } } return nearConnector; } private static void updateCurrentStatesForLinks( ShapeNodeEditPart childEditPart) { int x = MouseInfo.getPointerInfo().getLocation().x; int y = MouseInfo.getPointerInfo().getLocation().y; Control ctrl = childEditPart.getViewer().getControl(); FigureCanvas canvas = (FigureCanvas) ctrl; int horizontal = canvas.getHorizontalBar().getSelection(); int vertical = canvas.getVerticalBar().getSelection(); EsbMultiPageEditor esbMultiPageEditor = (EsbMultiPageEditor) EditorUtils.getActiveEditor(); double zoom = esbMultiPageEditor.getZoom(); /* * Commented following two line of codes to get rid of the issue - When * element is dropped inside the compartment of a complex * mediator(Aggregate etc.) at the border of it, it is connected to * outer mediator flow but resides inside the compartment. 
*/ //horizontal += 20; //vertical += 30; org.eclipse.swt.graphics.Point p = canvas.toDisplay(0, 0); currentFigureLocation = new Point((((x - p.x) + horizontal)/zoom), (((y - p.y) + vertical)/zoom)); } private static void updateCurrentStatesForGivenFigure( ShapeNodeEditPart childEditPart) { connectorFigureLocation = new Point( childEditPart.getLocation().x, childEditPart.getLocation().y); } private static void updateCurrentStatesForConnectors( ShapeNodeEditPart childEditPart) { currentFigureLocation = new Point( childEditPart.getLocation().x, childEditPart.getLocation().y); } }
plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/custom/connections/ConnectionCalculator.java
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.connections; import java.awt.MouseInfo; import java.util.ArrayList; import org.eclipse.draw2d.FigureCanvas; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.geometry.Point; import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderedShapeEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.ShapeNodeEditPart; import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure; import org.eclipse.swt.widgets.Control; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractBaseFigureEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorFlowCompartmentEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorOutputConnectorEditPart.EastPointerFigure; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorOutputConnectorEditPart.WestPointerFigure; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractOutputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EsbLinkEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyServiceEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbMultiPageEditor; /* * This class is used to handle automatic connection creation stuffs. 
* */ public class ConnectionCalculator { private static Point currentFigureLocation = null; private static Point connectorFigureLocation = null; public static EsbLinkEditPart getNearestLinkEditPart(ArrayList links, AbstractBorderedShapeEditPart childEditPart) { double current = 0.0; ArrayList<EsbLinkEditPart> nearLinks = new ArrayList<EsbLinkEditPart>(); double distance = 0.0; double distanceToUpperLine = 0.0; double distanceToBottomLine = 0.0; EsbLinkEditPart nearestLink = null; EsbMultiPageEditor esbMultiPageEditor = (EsbMultiPageEditor) EditorUtils.getActiveEditor(); double zoom = esbMultiPageEditor.getZoom(); if (childEditPart != null) { updateCurrentStatesForLinks(childEditPart); for (int i = 0; i < links.size(); ++i) { if (!links.get(i).equals(childEditPart)) { int xLeft = ((EsbLinkEditPart) links.get(i)).getFigure() .getBounds().getLeft().x; int xRight = (((EsbLinkEditPart) links.get(i)).getFigure() .getBounds().getRight().x); double actualCurrentPosition = currentFigureLocation.x; if ((xLeft < actualCurrentPosition) && (actualCurrentPosition < xRight)) { nearLinks.add((EsbLinkEditPart) links.get(i)); } } } } for (int q = 0; q < nearLinks.size(); ++q) { if ((((nearLinks.get(q).getFigure().getBounds().getLeft().y) < ((currentFigureLocation.y) + (70*zoom))) && ((currentFigureLocation.y) < (nearLinks .get(q).getFigure().getBounds().getBottomLeft().y))) || (((nearLinks.get(q).getFigure().getBounds().getLeft().y) > (currentFigureLocation.y + (70*zoom))) && ((currentFigureLocation.y) > (nearLinks .get(q).getFigure().getBounds().getBottomLeft().y)))) { return (EsbLinkEditPart) nearLinks.get(q); } distanceToUpperLine = nearLinks.get(q).getFigure().getBounds() .getLeft().y - currentFigureLocation.y; distanceToUpperLine = Math.abs(distanceToUpperLine); distanceToBottomLine = nearLinks.get(q).getFigure().getBounds() .getBottomLeft().y - currentFigureLocation.y; distanceToBottomLine = Math.abs(distanceToBottomLine); if (distanceToUpperLine > distanceToBottomLine) { 
distance = distanceToBottomLine; } else { distance = distanceToUpperLine; } if (current == 0.0) { current = distance; nearestLink = (EsbLinkEditPart) nearLinks.get(q); } else if (distance < current) { current = distance; nearestLink = (EsbLinkEditPart) nearLinks.get(q); } } if (current > (60*zoom)) { return null; } return nearestLink; } public static ShapeNodeEditPart getNearestConnectorEditPart( ArrayList<AbstractConnectorEditPart> connectors, ShapeNodeEditPart childEditPart) { AbstractConnectorEditPart nearForwardConnector = null; AbstractConnectorEditPart nearReverseConnector = null; AbstractConnectorEditPart nearConnector = null; AbstractConnectorEditPart currentConnector = null; int yCurrent = 0, yDistance1 = 0, yDistance2 = 0; double EastDistance = 0, EastCurrent = 0, WestCurrent = 0, WestDistance = 0; EsbMultiPageEditor esbMultiPageEditor = (EsbMultiPageEditor) EditorUtils.getActiveEditor(); double zoom = esbMultiPageEditor.getZoom(); if (childEditPart != null) { if ((connectors.size() != 0)) { if (connectors.get(0) instanceof AbstractInputConnectorEditPart) { currentConnector = EditorUtils .getInputConnector(childEditPart); } else { currentConnector = EditorUtils .getOutputConnector(childEditPart); } } /* * Drop mediator doesn't have an Output connector. 
*/ if(childEditPart instanceof DropMediatorEditPart){ currentConnector = EditorUtils .getInputConnector(childEditPart); } for (int i = 0; i < connectors.size(); ++i) { IFigure figure = (IFigure) ((DefaultSizeNodeFigure) connectors .get(i).getFigure()).getChildren().get(0); if ((currentConnector!=null)&&(!connectors.get(i).equals(currentConnector)) && (!connectors.get(i).getParent() .equals(currentConnector.getParent()))) { updateCurrentStatesForConnectors(currentConnector); updateCurrentStatesForGivenFigure(connectors.get(i)); double xLeft=connectorFigureLocation.x; double actualCurrentPosition = currentFigureLocation.x; if ((figure instanceof EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorInputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesOutputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesInputConnectorEditPart.EastPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector.EastPointerFigure)) { if((figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutputConnectorEditPart.EastPointerFigure) ||(figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutputConnectorEditPart.EastPointerFigure)){ xLeft=xLeft-82; } EastDistance = Math.abs(xLeft - actualCurrentPosition); if (((connectors.get(i) instanceof AbstractOutputConnectorEditPart) && (xLeft < actualCurrentPosition)) || ((connectors.get(i) instanceof AbstractInputConnectorEditPart) && (xLeft > 
actualCurrentPosition))) { if ((EastCurrent == 0) || (EastCurrent > EastDistance)) { EastCurrent = EastDistance; nearForwardConnector = connectors.get(i); } } } else if ((figure instanceof WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractEndpointOutputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyFaultInputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutSequenceOutputConnectorEditPart.WestPointerFigure) || (figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutSequenceOutputConnectorEditPart.WestPointerFigure)) { if((figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyOutSequenceOutputConnectorEditPart.WestPointerFigure) ||(figure instanceof org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceOutSequenceOutputConnectorEditPart.WestPointerFigure)){ xLeft=xLeft-82; } WestDistance = Math.abs(xLeft - actualCurrentPosition); if (((connectors.get(i) instanceof AbstractOutputConnectorEditPart) && (xLeft > actualCurrentPosition)) || ((connectors.get(i) instanceof AbstractInputConnectorEditPart) && (xLeft < actualCurrentPosition))) { if ((WestCurrent == 0) || (WestCurrent > WestDistance)) { WestCurrent = WestDistance; nearReverseConnector = connectors.get(i); } } } } } } if (nearForwardConnector != null) { updateCurrentStatesForGivenFigure(nearForwardConnector); 
if((nearForwardConnector.getParent() instanceof ProxyServiceEditPart) ||(nearForwardConnector.getParent() instanceof APIResourceEditPart)){ int parentY=((AbstractBaseFigureEditPart)nearForwardConnector.getParent()).getLocation().y; yDistance1 = Math .abs(connectorFigureLocation.y-parentY - currentFigureLocation.y); }else{ yDistance1 = Math .abs(connectorFigureLocation.y - currentFigureLocation.y); } } if (nearReverseConnector != null) { updateCurrentStatesForGivenFigure(nearReverseConnector); if((nearReverseConnector.getParent() instanceof ProxyServiceEditPart) ||(nearReverseConnector.getParent() instanceof APIResourceEditPart)){ int parentY=((AbstractBaseFigureEditPart)nearReverseConnector.getParent()).getLocation().y; yDistance2 = Math .abs(connectorFigureLocation.y-parentY - currentFigureLocation.y); }else{ yDistance2 = Math .abs(connectorFigureLocation.y - currentFigureLocation.y); } } if ((yDistance1 != 0) && ((yDistance2 == 0) || (yDistance1 < yDistance2))) { yCurrent = yDistance1; nearConnector = nearForwardConnector; } else { yCurrent = yDistance2; nearConnector = nearReverseConnector; } if (yCurrent > 60*zoom) { nearConnector = null; } /* * If 'nearConnector' is null we have to check again whether dropped * mediator is the first element of a compartment of a complex mediator. * If it is true we have to use following logic to get nearest * connection. 
*/ if (nearConnector == null) { if (childEditPart.getParent() instanceof AbstractMediatorFlowCompartmentEditPart) { int compartmentCenter_y = ((AbstractMediatorFlowCompartmentEditPart) childEditPart .getParent()).getFigure().getBounds().getCenter().y; if (EditorUtils.getMediator(childEditPart.getParent()) != null) { ArrayList<AdditionalOutputConnector> additionalConnectors = EditorUtils .getMediatorAdditionalOutputConnectors(EditorUtils .getMediator(childEditPart.getParent())); AdditionalOutputConnector temp = null; int diff_temp = 0; for (AdditionalOutputConnector con : additionalConnectors) { int diff = Math.abs(con.getLocation().y - compartmentCenter_y); if (diff_temp == 0) { temp = con; diff_temp = diff; } else if (diff < diff_temp) { temp = con; diff_temp = diff; } } if (connectors.get(0) instanceof AbstractOutputConnectorEditPart) { nearConnector = temp; } } } } return nearConnector; } private static void updateCurrentStatesForLinks( ShapeNodeEditPart childEditPart) { int x = MouseInfo.getPointerInfo().getLocation().x; int y = MouseInfo.getPointerInfo().getLocation().y; Control ctrl = childEditPart.getViewer().getControl(); FigureCanvas canvas = (FigureCanvas) ctrl; int horizontal = canvas.getHorizontalBar().getSelection(); int vertical = canvas.getVerticalBar().getSelection(); EsbMultiPageEditor esbMultiPageEditor = (EsbMultiPageEditor) EditorUtils.getActiveEditor(); double zoom = esbMultiPageEditor.getZoom(); /* * Commented following two line of codes to get rid of the issue - When * element is dropped inside the compartment of a complex * mediator(Aggregate etc.) at the border of it, it is connected to * outer mediator flow but resides inside the compartment. 
*/ //horizontal += 20; //vertical += 30; org.eclipse.swt.graphics.Point p = canvas.toDisplay(0, 0); currentFigureLocation = new Point((((x - p.x) + horizontal)/zoom), (((y - p.y) + vertical)/zoom)); } private static void updateCurrentStatesForGivenFigure( ShapeNodeEditPart childEditPart) { connectorFigureLocation = new Point( childEditPart.getLocation().x, childEditPart.getLocation().y); } private static void updateCurrentStatesForConnectors( ShapeNodeEditPart childEditPart) { currentFigureLocation = new Point( childEditPart.getLocation().x, childEditPart.getLocation().y); } }
fix dropping mediators at wrong places inside a complex mediators
plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/custom/connections/ConnectionCalculator.java
fix dropping mediators at wrong places inside a complex mediators
Java
apache-2.0
efab7b49e8ec851b4a78275fd7a3739725bfebf4
0
apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere
/* * Copyright 2016-2018 shardingsphere.io. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package io.shardingsphere.jdbc.orchestration.spring; import io.shardingsphere.core.api.ConfigMapContext; import io.shardingsphere.core.api.config.strategy.InlineShardingStrategyConfiguration; import io.shardingsphere.core.api.config.strategy.StandardShardingStrategyConfiguration; import io.shardingsphere.core.constant.properties.ShardingProperties; import io.shardingsphere.core.constant.properties.ShardingPropertiesConstant; import io.shardingsphere.core.jdbc.core.datasource.ShardingDataSource; import io.shardingsphere.core.rule.BindingTableRule; import io.shardingsphere.core.rule.DataNode; import io.shardingsphere.core.rule.ShardingRule; import io.shardingsphere.core.rule.TableRule; import io.shardingsphere.jdbc.orchestration.spring.datasource.OrchestrationSpringShardingDataSource; import io.shardingsphere.jdbc.orchestration.spring.fixture.IncrementKeyGenerator; import io.shardingsphere.jdbc.orchestration.spring.util.EmbedTestingServer; import io.shardingsphere.jdbc.orchestration.spring.util.FieldValueUtil; import org.junit.BeforeClass; import org.junit.Test; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests; import javax.sql.DataSource; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import static junit.framework.TestCase.assertTrue; 
import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; @ContextConfiguration(locations = "classpath:META-INF/rdb/shardingOrchestration.xml") public class OrchestrationShardingNamespaceTest extends AbstractJUnit4SpringContextTests { @BeforeClass public static void init() { EmbedTestingServer.start(); } @Test public void assertSimpleShardingDataSource() { Map<String, DataSource> dataSourceMap = getDataSourceMap("simpleShardingOrchestration"); ShardingRule shardingRule = getShardingRule("simpleShardingOrchestration"); assertNotNull(dataSourceMap.get("dbtbl_0")); assertThat(shardingRule.getTableRules().size(), is(1)); assertThat(shardingRule.getTableRules().iterator().next().getLogicTable(), is("t_order")); } @Test public void assertShardingRuleWithAttributesDataSource() { Map<String, DataSource> dataSourceMap = getDataSourceMap("shardingRuleWithAttributesDataSourceOrchestration"); ShardingRule shardingRule = getShardingRule("shardingRuleWithAttributesDataSourceOrchestration"); assertNotNull(dataSourceMap.get("dbtbl_0")); assertNotNull(dataSourceMap.get("dbtbl_1")); assertThat(shardingRule.getShardingDataSourceNames().getDefaultDataSourceName(), is("dbtbl_0")); assertTrue(Arrays.equals(shardingRule.getDefaultDatabaseShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("standardStrategy", StandardShardingStrategyConfiguration.class).getShardingColumn()})); assertTrue(Arrays.equals(shardingRule.getDefaultTableShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("inlineStrategy", InlineShardingStrategyConfiguration.class).getShardingColumn()})); assertThat(shardingRule.getDefaultKeyGenerator().getClass().getName(), is(IncrementKeyGenerator.class.getCanonicalName())); } @Test public void assertTableRuleWithAttributesDataSource() { 
ShardingRule shardingRule = getShardingRule("tableRuleWithAttributesDataSourceOrchestration"); assertThat(shardingRule.getTableRules().size(), is(1)); TableRule tableRule = shardingRule.getTableRules().iterator().next(); assertThat(tableRule.getLogicTable(), is("t_order")); assertThat(tableRule.getActualDataNodes().size(), is(8)); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_0"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_1"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_2"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_3"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_0"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_1"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_2"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_3"))); assertTrue(Arrays.equals(tableRule.getDatabaseShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("standardStrategy", StandardShardingStrategyConfiguration.class).getShardingColumn()})); assertTrue(Arrays.equals(tableRule.getTableShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("inlineStrategy", InlineShardingStrategyConfiguration.class).getShardingColumn()})); assertThat(tableRule.getGenerateKeyColumn(), is("order_id")); assertThat(tableRule.getKeyGenerator().getClass().getName(), is(IncrementKeyGenerator.class.getCanonicalName())); } @Test public void assertMultiTableRulesDataSource() { ShardingRule shardingRule = getShardingRule("multiTableRulesDataSourceOrchestration"); assertThat(shardingRule.getTableRules().size(), is(2)); Iterator<TableRule> tableRules = shardingRule.getTableRules().iterator(); 
assertThat(tableRules.next().getLogicTable(), is("t_order")); assertThat(tableRules.next().getLogicTable(), is("t_order_item")); } @Test public void assertBindingTableRuleDatasource() { ShardingRule shardingRule = getShardingRule("bindingTableRuleDatasourceOrchestration"); assertThat(shardingRule.getBindingTableRules().size(), is(1)); BindingTableRule bindingTableRule = shardingRule.getBindingTableRules().iterator().next(); assertThat(bindingTableRule.getBindingActualTable("dbtbl_0", "t_order", "t_order_item"), is("t_order")); assertThat(bindingTableRule.getBindingActualTable("dbtbl_1", "t_order", "t_order_item"), is("t_order")); } @Test public void assertMultiBindingTableRulesDatasource() { ShardingRule shardingRule = getShardingRule("multiBindingTableRulesDatasourceOrchestration"); assertThat(shardingRule.getBindingTableRules().size(), is(2)); Iterator<BindingTableRule> bindingTableRules = shardingRule.getBindingTableRules().iterator(); BindingTableRule orderRule = bindingTableRules.next(); assertThat(orderRule.getBindingActualTable("dbtbl_0", "t_order", "t_order_item"), is("t_order")); assertThat(orderRule.getBindingActualTable("dbtbl_1", "t_order", "t_order_item"), is("t_order")); BindingTableRule userRule = bindingTableRules.next(); assertThat(userRule.getBindingActualTable("dbtbl_0", "t_user", "t_user_detail"), is("t_user")); assertThat(userRule.getBindingActualTable("dbtbl_1", "t_user", "t_user_detail"), is("t_user")); } @Test public void assertPropsDataSource() { OrchestrationSpringShardingDataSource shardingDataSource = this.applicationContext.getBean("propsDataSourceOrchestration", OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(shardingDataSource, "dataSource", true); Map<String, Object> configMap = new HashMap<>(); configMap.put("key1", "value1"); assertThat(ConfigMapContext.getInstance().getShardingConfig(), is(configMap)); Object shardingContext = 
FieldValueUtil.getFieldValue(dataSource, "shardingContext", true); assertTrue((boolean) FieldValueUtil.getFieldValue(shardingContext, "showSQL")); ShardingProperties shardingProperties = (ShardingProperties) FieldValueUtil.getFieldValue(dataSource, "shardingProperties", true); boolean showSql = shardingProperties.getValue(ShardingPropertiesConstant.SQL_SHOW); assertTrue(showSql); int executorSize = shardingProperties.getValue(ShardingPropertiesConstant.EXECUTOR_SIZE); assertThat(executorSize, is(10)); assertNull(ShardingPropertiesConstant.findByKey("foo")); } @Test public void assertShardingDataSourceType() { assertTrue(this.applicationContext.getBean("simpleShardingOrchestration") instanceof OrchestrationSpringShardingDataSource); } @Test public void assertDefaultActualDataNodes() { OrchestrationSpringShardingDataSource multiTableRulesDataSource = this.applicationContext.getBean("multiTableRulesDataSourceOrchestration", OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(multiTableRulesDataSource, "dataSource", true); Object shardingContext = FieldValueUtil.getFieldValue(dataSource, "shardingContext", true); ShardingRule shardingRule = (ShardingRule) FieldValueUtil.getFieldValue(shardingContext, "shardingRule"); assertThat(shardingRule.getTableRules().size(), is(2)); Iterator<TableRule> tableRules = shardingRule.getTableRules().iterator(); TableRule orderRule = tableRules.next(); assertThat(orderRule.getActualDataNodes().size(), is(2)); assertTrue(orderRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order"))); assertTrue(orderRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order"))); TableRule orderItemRule = tableRules.next(); assertThat(orderItemRule.getActualDataNodes().size(), is(2)); assertTrue(orderItemRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_item"))); assertTrue(orderItemRule.getActualDataNodes().contains(new DataNode("dbtbl_1", 
"t_order_item"))); } @SuppressWarnings("unchecked") private Map<String, DataSource> getDataSourceMap(final String shardingDataSourceName) { OrchestrationSpringShardingDataSource shardingDataSource = this.applicationContext.getBean(shardingDataSourceName, OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(shardingDataSource, "dataSource", true); return dataSource.getDataSourceMap(); } private ShardingRule getShardingRule(final String shardingDataSourceName) { OrchestrationSpringShardingDataSource shardingDataSource = this.applicationContext.getBean(shardingDataSourceName, OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(shardingDataSource, "dataSource", true); Object shardingContext = FieldValueUtil.getFieldValue(dataSource, "shardingContext", true); return (ShardingRule) FieldValueUtil.getFieldValue(shardingContext, "shardingRule"); } }
sharding-orchestration/sharding-jdbc-orchestration-spring/sharding-jdbc-orchestration-spring-namespace/src/test/java/io/shardingsphere/jdbc/orchestration/spring/OrchestrationShardingNamespaceTest.java
/* * Copyright 2016-2018 shardingsphere.io. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package io.shardingsphere.jdbc.orchestration.spring; import io.shardingsphere.core.api.ConfigMapContext; import io.shardingsphere.core.api.config.strategy.InlineShardingStrategyConfiguration; import io.shardingsphere.core.api.config.strategy.StandardShardingStrategyConfiguration; import io.shardingsphere.core.constant.properties.ShardingProperties; import io.shardingsphere.core.constant.properties.ShardingPropertiesConstant; import io.shardingsphere.core.jdbc.core.datasource.ShardingDataSource; import io.shardingsphere.core.rule.BindingTableRule; import io.shardingsphere.core.rule.DataNode; import io.shardingsphere.core.rule.ShardingRule; import io.shardingsphere.core.rule.TableRule; import io.shardingsphere.jdbc.orchestration.spring.datasource.OrchestrationSpringShardingDataSource; import io.shardingsphere.jdbc.orchestration.spring.fixture.IncrementKeyGenerator; import io.shardingsphere.jdbc.orchestration.spring.util.EmbedTestingServer; import io.shardingsphere.jdbc.orchestration.spring.util.FieldValueUtil; import org.junit.BeforeClass; import org.junit.Test; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests; import javax.sql.DataSource; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import static junit.framework.TestCase.assertTrue; 
import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; @ContextConfiguration(locations = "classpath:META-INF/rdb/shardingOrchestration.xml") public class OrchestrationShardingNamespaceTest extends AbstractJUnit4SpringContextTests { @BeforeClass public static void init() { EmbedTestingServer.start(); } @Test public void assertSimpleShardingDataSource() { Map<String, DataSource> dataSourceMap = getDataSourceMap("simpleShardingOrchestration"); ShardingRule shardingRule = getShardingRule("simpleShardingOrchestration"); assertNotNull(dataSourceMap.get("dbtbl_0")); assertThat(shardingRule.getTableRules().size(), is(1)); assertThat(shardingRule.getTableRules().iterator().next().getLogicTable(), is("t_order")); } @Test public void assertShardingRuleWithAttributesDataSource() { Map<String, DataSource> dataSourceMap = getDataSourceMap("shardingRuleWithAttributesDataSourceOrchestration"); ShardingRule shardingRule = getShardingRule("shardingRuleWithAttributesDataSourceOrchestration"); assertNotNull(dataSourceMap.get("dbtbl_0")); assertNotNull(dataSourceMap.get("dbtbl_1")); assertThat(shardingRule.getShardingDataSourceNames().getDefaultDataSourceName(), is("dbtbl_0")); assertTrue(Arrays.equals(shardingRule.getDefaultDatabaseShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("standardStrategy", StandardShardingStrategyConfiguration.class).getShardingColumn()})); assertTrue(Arrays.equals(shardingRule.getDefaultTableShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("inlineStrategy", InlineShardingStrategyConfiguration.class).getShardingColumn()})); assertThat(shardingRule.getDefaultKeyGenerator().getClass().getName(), is(IncrementKeyGenerator.class.getCanonicalName())); } @Test public void assertTableRuleWithAttributesDataSource() { 
ShardingRule shardingRule = getShardingRule("tableRuleWithAttributesDataSourceOrchestration"); assertThat(shardingRule.getTableRules().size(), is(1)); TableRule tableRule = shardingRule.getTableRules().iterator().next(); assertThat(tableRule.getLogicTable(), is("t_order")); assertThat(tableRule.getActualDataNodes().size(), is(8)); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_0"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_1"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_2"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_3"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_0"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_1"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_2"))); assertTrue(tableRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order_3"))); assertTrue(Arrays.equals(tableRule.getDatabaseShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("standardStrategy", StandardShardingStrategyConfiguration.class).getShardingColumn()})); assertTrue(Arrays.equals(tableRule.getTableShardingStrategy().getShardingColumns().toArray(new String[]{}), new String[]{this.applicationContext.getBean("inlineStrategy", InlineShardingStrategyConfiguration.class).getShardingColumn()})); assertThat(tableRule.getGenerateKeyColumn(), is("order_id")); assertThat(tableRule.getKeyGenerator().getClass().getName(), is(IncrementKeyGenerator.class.getCanonicalName())); } @Test public void assertMultiTableRulesDataSource() { ShardingRule shardingRule = getShardingRule("multiTableRulesDataSourceOrchestration"); assertThat(shardingRule.getTableRules().size(), is(2)); Iterator<TableRule> tableRules = shardingRule.getTableRules().iterator(); 
assertThat(tableRules.next().getLogicTable(), is("t_order")); assertThat(tableRules.next().getLogicTable(), is("t_order_item")); } @Test public void assertBindingTableRuleDatasource() { ShardingRule shardingRule = getShardingRule("bindingTableRuleDatasourceOrchestration"); assertThat(shardingRule.getBindingTableRules().size(), is(1)); BindingTableRule bindingTableRule = shardingRule.getBindingTableRules().iterator().next(); assertThat(bindingTableRule.getBindingActualTable("dbtbl_0", "t_order", "t_order_item"), is("t_order")); assertThat(bindingTableRule.getBindingActualTable("dbtbl_1", "t_order", "t_order_item"), is("t_order")); } @Test public void assertMultiBindingTableRulesDatasource() { ShardingRule shardingRule = getShardingRule("multiBindingTableRulesDatasourceOrchestration"); assertThat(shardingRule.getBindingTableRules().size(), is(2)); Iterator<BindingTableRule> bindingTableRules = shardingRule.getBindingTableRules().iterator(); BindingTableRule orderRule = bindingTableRules.next(); assertThat(orderRule.getBindingActualTable("dbtbl_0", "t_order", "t_order_item"), is("t_order")); assertThat(orderRule.getBindingActualTable("dbtbl_1", "t_order", "t_order_item"), is("t_order")); BindingTableRule userRule = bindingTableRules.next(); assertThat(userRule.getBindingActualTable("dbtbl_0", "t_user", "t_user_detail"), is("t_user")); assertThat(userRule.getBindingActualTable("dbtbl_1", "t_user", "t_user_detail"), is("t_user")); } @Test public void assertPropsDataSource() { OrchestrationSpringShardingDataSource shardingDataSource = this.applicationContext.getBean("propsDataSourceOrchestration", OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(shardingDataSource, "dataSource", true); Map<String, Object> configMap = new HashMap<>(); configMap.put("key1", "value1"); assertThat(ConfigMapContext.getInstance().getShardingConfig(), is(configMap)); Object shardingContext = 
FieldValueUtil.getFieldValue(dataSource, "shardingContext"); assertTrue((boolean) FieldValueUtil.getFieldValue(shardingContext, "showSQL")); ShardingProperties shardingProperties = (ShardingProperties) FieldValueUtil.getFieldValue(dataSource, "shardingProperties"); boolean showSql = shardingProperties.getValue(ShardingPropertiesConstant.SQL_SHOW); assertTrue(showSql); int executorSize = shardingProperties.getValue(ShardingPropertiesConstant.EXECUTOR_SIZE); assertThat(executorSize, is(10)); assertNull(ShardingPropertiesConstant.findByKey("foo")); } @Test public void assertShardingDataSourceType() { assertTrue(this.applicationContext.getBean("simpleShardingOrchestration") instanceof OrchestrationSpringShardingDataSource); } @Test public void assertDefaultActualDataNodes() { OrchestrationSpringShardingDataSource multiTableRulesDataSource = this.applicationContext.getBean("multiTableRulesDataSourceOrchestration", OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(multiTableRulesDataSource, "dataSource", true); Object shardingContext = FieldValueUtil.getFieldValue(dataSource, "shardingContext"); ShardingRule shardingRule = (ShardingRule) FieldValueUtil.getFieldValue(shardingContext, "shardingRule"); assertThat(shardingRule.getTableRules().size(), is(2)); Iterator<TableRule> tableRules = shardingRule.getTableRules().iterator(); TableRule orderRule = tableRules.next(); assertThat(orderRule.getActualDataNodes().size(), is(2)); assertTrue(orderRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order"))); assertTrue(orderRule.getActualDataNodes().contains(new DataNode("dbtbl_1", "t_order"))); TableRule orderItemRule = tableRules.next(); assertThat(orderItemRule.getActualDataNodes().size(), is(2)); assertTrue(orderItemRule.getActualDataNodes().contains(new DataNode("dbtbl_0", "t_order_item"))); assertTrue(orderItemRule.getActualDataNodes().contains(new DataNode("dbtbl_1", 
"t_order_item"))); } @SuppressWarnings("unchecked") private Map<String, DataSource> getDataSourceMap(final String shardingDataSourceName) { OrchestrationSpringShardingDataSource shardingDataSource = this.applicationContext.getBean(shardingDataSourceName, OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(shardingDataSource, "dataSource", true); return dataSource.getDataSourceMap(); } private ShardingRule getShardingRule(final String shardingDataSourceName) { OrchestrationSpringShardingDataSource shardingDataSource = this.applicationContext.getBean(shardingDataSourceName, OrchestrationSpringShardingDataSource.class); ShardingDataSource dataSource = (ShardingDataSource) FieldValueUtil.getFieldValue(shardingDataSource, "dataSource", true); Object shardingContext = FieldValueUtil.getFieldValue(dataSource, "shardingContext"); return (ShardingRule) FieldValueUtil.getFieldValue(shardingContext, "shardingRule"); } }
modify codes
sharding-orchestration/sharding-jdbc-orchestration-spring/sharding-jdbc-orchestration-spring-namespace/src/test/java/io/shardingsphere/jdbc/orchestration/spring/OrchestrationShardingNamespaceTest.java
modify codes
Java
apache-2.0
cdf9d72bc918a0675cae8ae712ce89909f44b0b9
0
jvasileff/ceylon-dart,jvasileff/ceylon-dart,jvasileff/ceylon-dart,jvasileff/ceylon-dart
package com.vasileff.ceylon.dart.compiler; public enum Warning { filenameNonAscii, filenameCaselessCollision, deprecation, disjointEquals, disjointContainment, compilerAnnotation, doclink, expressionTypeNothing, unusedDeclaration, unusedImport, ceylonNamespace, javaNamespace, suppressedAlready, suppressesNothing, unknownWarning, ambiguousAnnotation, similarModule, importsOtherJdk, javaAnnotationElement, syntaxDeprecation, smallIgnored, literalNotSmall, redundantNarrowing, redundantIteration, missingImportPrefix, uncheckedTypeArguments, expressionTypeCallable, uncheckedType, unsupported, undefinedEquality, inferredNotNull }
ceylon-dart-compiler/source/com/vasileff/ceylon/dart/compiler/Warning.java
package com.vasileff.ceylon.dart.compiler; public enum Warning { filenameNonAscii, filenameCaselessCollision, deprecation, disjointEquals, disjointContainment, compilerAnnotation, doclink, expressionTypeNothing, unusedDeclaration, unusedImport, ceylonNamespace, javaNamespace, suppressedAlready, suppressesNothing, unknownWarning, ambiguousAnnotation, similarModule, importsOtherJdk, javaAnnotationElement, syntaxDeprecation, smallIgnored, literalNotSmall, redundantNarrowing, redundantIteration, missingImportPrefix, uncheckedTypeArguments, expressionTypeCallable, uncheckedType, unsupported, undefinedEquality }
add the inferredNotNull warning
ceylon-dart-compiler/source/com/vasileff/ceylon/dart/compiler/Warning.java
add the inferredNotNull warning
Java
apache-2.0
78152a2cb39d40943e7b986842ddf5334e5d24d4
0
adobe/S3Mock,adobe/S3Mock,adobe/S3Mock
/* * Copyright 2017-2022 Adobe. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.adobe.testing.s3mock; import static com.adobe.testing.s3mock.FileStoreController.collapseCommonPrefixes; import static com.adobe.testing.s3mock.FileStoreController.filterBucketContentsBy; import static com.adobe.testing.s3mock.util.AwsHttpParameters.ENCODING_TYPE; import static com.adobe.testing.s3mock.util.AwsHttpParameters.MAX_KEYS; import static java.util.stream.Collectors.toList; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.head; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import com.adobe.testing.s3mock.dto.Bucket; import com.adobe.testing.s3mock.dto.BucketContents; import com.adobe.testing.s3mock.dto.Buckets; import com.adobe.testing.s3mock.dto.ListAllMyBucketsResult; import com.adobe.testing.s3mock.dto.ListBucketResult; import com.adobe.testing.s3mock.dto.Owner; import com.adobe.testing.s3mock.store.FileStore; import com.adobe.testing.s3mock.store.KmsKeyStore; import com.adobe.testing.s3mock.store.S3Object; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.xml.XmlMapper; import 
java.io.IOException; import java.nio.file.Paths; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureWebMvc; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.result.MockMvcResultMatchers; @AutoConfigureWebMvc @AutoConfigureMockMvc @SpringBootTest(classes = {S3MockConfiguration.class}) class FileStoreControllerTest { //verbatim copy from FileStoreController / FileStore private static final Owner TEST_OWNER = new Owner(123, "s3-mock-file-store"); private static final ObjectMapper MAPPER = new XmlMapper(); private static final String[] ALL_OBJECTS = new String[] {"3330/0", "33309/0", "a", "b", "b/1", "b/1/1", "b/1/2", "b/2", "c/1", "c/1/1", "d:1", "d:1:1", "eor.txt", "foo/eor.txt"}; private static final String TEST_BUCKET_NAME = "testBucket"; private static final Bucket TEST_BUCKET = new Bucket(Paths.get("/tmp/foo/1"), TEST_BUCKET_NAME, Instant.now().toString()); @MockBean private KmsKeyStore kmsKeyStore; //Dependency of S3MockConfiguration. 
@MockBean private FileStore fileStore; @Autowired private MockMvc mockMvc; @Test void testListBuckets_Ok() throws Exception { List<Bucket> bucketList = new ArrayList<>(); bucketList.add(TEST_BUCKET); bucketList.add(new Bucket(Paths.get("/tmp/foo/2"), "testBucket1", Instant.now().toString())); when(fileStore.listBuckets()).thenReturn(bucketList); ListAllMyBucketsResult expected = new ListAllMyBucketsResult(); Buckets buckets = new Buckets(); buckets.setBuckets(bucketList); expected.setBuckets(buckets); expected.setOwner(TEST_OWNER); mockMvc.perform( get("/") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()) .andExpect(MockMvcResultMatchers.content().contentType(MediaType.APPLICATION_XML)) .andExpect(MockMvcResultMatchers.content().xml(MAPPER.writeValueAsString(expected))); } @Test void testListBuckets_Empty() throws Exception { when(fileStore.listBuckets()).thenReturn(Collections.emptyList()); ListAllMyBucketsResult expected = new ListAllMyBucketsResult(); expected.setOwner(TEST_OWNER); mockMvc.perform( get("/") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()) .andExpect(MockMvcResultMatchers.content().contentType(MediaType.APPLICATION_XML)) .andExpect(MockMvcResultMatchers.content().xml(MAPPER.writeValueAsString(expected))); } @Test void testHeadBucket_Ok() throws Exception { when(fileStore.doesBucketExist(TEST_BUCKET_NAME)).thenReturn(true); mockMvc.perform( head("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()); } @Test void testHeadBucket_NotFound() throws Exception { when(fileStore.doesBucketExist(TEST_BUCKET_NAME)).thenReturn(false); mockMvc.perform( head("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isNotFound()); } @Test void 
testCreateBucket_Ok() throws Exception { mockMvc.perform( put("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()); } @Test void testCreateBucket_InternalServerError() throws Exception { when(fileStore.createBucket(TEST_BUCKET_NAME)) .thenThrow(new RuntimeException("THIS IS EXPECTED")); mockMvc.perform( put("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isInternalServerError()); } @Test void testDeleteBucket_NoContent() throws Exception { givenBucket(); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)).thenReturn(Collections.emptyList()); when(fileStore.deleteBucket(TEST_BUCKET_NAME)).thenReturn(true); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isNoContent()); } @Test void testDeleteBucket_NotFound() throws Exception { givenBucket(); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)).thenReturn(Collections.emptyList()); when(fileStore.deleteBucket(TEST_BUCKET_NAME)).thenReturn(false); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isNotFound()); } @Test void testDeleteBucket_Conflict() throws Exception { givenBucket(); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)) .thenReturn(Collections.singletonList(new S3Object())); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isConflict()); } @Test void testDeleteBucket_InternalServerError() throws Exception { givenBucket(); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)) .thenThrow(new IOException("THIS IS EXPECTED")); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) 
.contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isInternalServerError()); } @Test void testListObjectsInsideBucket_BadRequest() throws Exception { givenBucket(); mockMvc.perform( get("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) .queryParam(MAX_KEYS, "-1") ).andExpect(MockMvcResultMatchers.status().isBadRequest()); mockMvc.perform( get("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) .queryParam(ENCODING_TYPE, "not_valid") ).andExpect(MockMvcResultMatchers.status().isBadRequest()); } @Test void testListObjectsInsideBucket_InternalServerError() throws Exception { givenBucket(); String prefix = null; when(fileStore.getS3Objects(TEST_BUCKET_NAME, prefix)) .thenThrow(new IOException("THIS IS EXPECTED")); mockMvc.perform( get("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isInternalServerError()); } @Test void testListObjectsInsideBucket_Ok() throws Exception { givenBucket(); String key = "key"; String prefix = null; BucketContents bucketContents = bucketContents(key); ListBucketResult expected = new ListBucketResult(TEST_BUCKET_NAME, null, null, 1000, false, null, null, Collections.singletonList(bucketContents), Collections.emptyList()); when(fileStore.getS3Objects(TEST_BUCKET_NAME, prefix)) .thenReturn(Collections.singletonList(s3Object(key))); mockMvc.perform( get("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()) .andExpect(MockMvcResultMatchers.content().contentType(MediaType.APPLICATION_XML)) .andExpect(MockMvcResultMatchers.content().xml(MAPPER.writeValueAsString(expected))); } private void givenBucket() { when(fileStore.getBucket(TEST_BUCKET_NAME)).thenReturn(TEST_BUCKET); } private BucketContents bucketContents(String id) { return new BucketContents(id, "1234", "etag", 
"size", "STANDARD", TEST_OWNER); } private S3Object s3Object(String id) { S3Object s3Object = new S3Object(); s3Object.setName(id); s3Object.setModificationDate("1234"); s3Object.setMd5("etag"); s3Object.setSize("size"); return s3Object; } /** * Parameter factory. * Taken from ListObjectIT to make sure we unit test against the same data. */ public static Iterable<Param> data() { return Arrays.asList( param(null, null).keys(ALL_OBJECTS), param("", null).keys(ALL_OBJECTS), param(null, "").keys(ALL_OBJECTS), param(null, "/").keys("a", "b", "d:1", "d:1:1", "eor.txt") .prefixes("3330/", "foo/", "c/", "b/", "33309/"), param("", "").keys(ALL_OBJECTS), param("/", null), param("b", null).keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), param("b/", null).keys("b/1", "b/1/1", "b/1/2", "b/2"), param("b", "").keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), param("b", "/").keys("b").prefixes("b/"), param("b/", "/").keys("b/1", "b/2").prefixes("b/1/"), param("b/1", "/").keys("b/1").prefixes("b/1/"), param("b/1/", "/").keys("b/1/1", "b/1/2"), param("c", "/").prefixes("c/"), param("c/", "/").keys("c/1").prefixes("c/1/"), param("eor", "/").keys("eor.txt") ); } @ParameterizedTest @MethodSource("data") public void testCommonPrefixesAndBucketContentFilter(final Param parameters) { String prefix = parameters.prefix; String delimiter = parameters.delimiter; List<BucketContents> bucketContents = createBucketContentsList(prefix); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); List<BucketContents> filteredBucketContents = filterBucketContentsBy(bucketContents, commonPrefixes); String[] expectedPrefixes = parameters.expectedPrefixes; String[] expectedKeys = parameters.expectedKeys; assertThat(commonPrefixes).hasSize(expectedPrefixes.length); assertThat(commonPrefixes) .as("Returned prefixes are correct") .containsExactlyInAnyOrderElementsOf(Arrays.asList(expectedPrefixes)); assertThat(filteredBucketContents.stream().map(BucketContents::getKey).collect(toList())) 
.as("Returned keys are correct") .containsExactlyInAnyOrderElementsOf(Arrays.asList(expectedKeys)); } @Test void testCommonPrefixesNoPrefixNoDelimiter() { String prefix = ""; String delimiter = ""; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(0); } @Test void testCommonPrefixesPrefixNoDelimiter() { String prefix = "prefixa"; String delimiter = ""; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(0); } @Test void testCommonPrefixesNoPrefixDelimiter() { String prefix = ""; String delimiter = "/"; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(5).contains("3330/", "foo/", "c/", "b/", "33309/"); } @Test void testCommonPrefixesPrefixDelimiter() { String prefix = "3330"; String delimiter = "/"; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(2).contains("3330/", "33309/"); } List<BucketContents> createBucketContentsList() { return createBucketContentsList(null); } List<BucketContents> createBucketContentsList(String prefix) { List<BucketContents> list = new ArrayList<>(); for (String object : ALL_OBJECTS) { if (StringUtils.isNotEmpty(prefix)) { if (!object.startsWith(prefix)) { continue; } } list.add(createBucketContents(object)); } return list; } BucketContents createBucketContents(String key) { String lastModified = "lastModified"; String etag = "etag"; String size = "size"; String storageClass = "storageClass"; Owner owner = new Owner(0L, "name"); return new BucketContents(key, lastModified, etag, size, 
storageClass, owner); } static class Param { final String prefix; final String delimiter; String[] expectedPrefixes = new String[0]; String[] expectedKeys = new String[0]; private Param(final String prefix, final String delimiter) { this.prefix = prefix; this.delimiter = delimiter; } Param prefixes(final String... expectedPrefixes) { this.expectedPrefixes = expectedPrefixes; return this; } Param keys(final String... expectedKeys) { this.expectedKeys = expectedKeys; return this; } @Override public String toString() { return String.format("prefix=%s, delimiter=%s", prefix, delimiter); } } static Param param(final String prefix, final String delimiter) { return new Param(prefix, delimiter); } }
server/src/test/java/com/adobe/testing/s3mock/FileStoreControllerTest.java
/* * Copyright 2017-2022 Adobe. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.adobe.testing.s3mock; import static com.adobe.testing.s3mock.FileStoreController.collapseCommonPrefixes; import static com.adobe.testing.s3mock.FileStoreController.filterBucketContentsBy; import static java.util.stream.Collectors.toList; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.head; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import com.adobe.testing.s3mock.dto.Bucket; import com.adobe.testing.s3mock.dto.BucketContents; import com.adobe.testing.s3mock.dto.Buckets; import com.adobe.testing.s3mock.dto.ListAllMyBucketsResult; import com.adobe.testing.s3mock.dto.Owner; import com.adobe.testing.s3mock.store.FileStore; import com.adobe.testing.s3mock.store.KmsKeyStore; import com.adobe.testing.s3mock.store.S3Object; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.xml.XmlMapper; import java.io.IOException; import java.nio.file.Paths; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Set; import 
org.apache.commons.lang3.StringUtils; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureWebMvc; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.result.MockMvcResultMatchers; @AutoConfigureWebMvc @AutoConfigureMockMvc @SpringBootTest(classes = {S3MockConfiguration.class}) class FileStoreControllerTest { //verbatim copy from FileStoreController / FileStore private static final Owner TEST_OWNER = new Owner(123, "s3-mock-file-store"); private static final ObjectMapper MAPPER = new XmlMapper(); private static final String[] ALL_OBJECTS = new String[] {"3330/0", "33309/0", "a", "b", "b/1", "b/1/1", "b/1/2", "b/2", "c/1", "c/1/1", "d:1", "d:1:1", "eor.txt", "foo/eor.txt"}; private static final String TEST_BUCKET_NAME = "testBucket"; private static final Bucket TEST_BUCKET = new Bucket(Paths.get("/tmp/foo/1"), TEST_BUCKET_NAME, Instant.now().toString()); @MockBean private KmsKeyStore kmsKeyStore; //Dependency of S3MockConfiguration. 
@MockBean private FileStore fileStore; @Autowired private MockMvc mockMvc; @Test void testListBuckets_Ok() throws Exception { List<Bucket> bucketList = new ArrayList<>(); bucketList.add(TEST_BUCKET); bucketList.add(new Bucket(Paths.get("/tmp/foo/2"), "testBucket1", Instant.now().toString())); when(fileStore.listBuckets()).thenReturn(bucketList); ListAllMyBucketsResult expected = new ListAllMyBucketsResult(); Buckets buckets = new Buckets(); buckets.setBuckets(bucketList); expected.setBuckets(buckets); expected.setOwner(TEST_OWNER); mockMvc.perform( get("/") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()) .andExpect(MockMvcResultMatchers.content().contentType(MediaType.APPLICATION_XML)) .andExpect(MockMvcResultMatchers.content().xml(MAPPER.writeValueAsString(expected))); } @Test void testListBuckets_Empty() throws Exception { when(fileStore.listBuckets()).thenReturn(Collections.emptyList()); ListAllMyBucketsResult expected = new ListAllMyBucketsResult(); expected.setOwner(TEST_OWNER); mockMvc.perform( get("/") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()) .andExpect(MockMvcResultMatchers.content().contentType(MediaType.APPLICATION_XML)) .andExpect(MockMvcResultMatchers.content().xml(MAPPER.writeValueAsString(expected))); } @Test void testHeadBucket_Ok() throws Exception { when(fileStore.doesBucketExist(TEST_BUCKET_NAME)).thenReturn(true); mockMvc.perform( head("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()); } @Test void testHeadBucket_NotFound() throws Exception { when(fileStore.doesBucketExist(TEST_BUCKET_NAME)).thenReturn(false); mockMvc.perform( head("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isNotFound()); } @Test void 
testCreateBucket_Ok() throws Exception { mockMvc.perform( put("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isOk()); } @Test void testCreateBucket_InternalServerError() throws Exception { when(fileStore.createBucket(TEST_BUCKET_NAME)) .thenThrow(new RuntimeException("THIS IS EXPECTED")); mockMvc.perform( put("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isInternalServerError()); } @Test void testDeleteBucket_NoContent() throws Exception { when(fileStore.getBucket(TEST_BUCKET_NAME)).thenReturn(TEST_BUCKET); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)).thenReturn(Collections.emptyList()); when(fileStore.deleteBucket(TEST_BUCKET_NAME)).thenReturn(true); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isNoContent()); } @Test void testDeleteBucket_NotFound() throws Exception { when(fileStore.getBucket(TEST_BUCKET_NAME)).thenReturn(TEST_BUCKET); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)).thenReturn(Collections.emptyList()); when(fileStore.deleteBucket(TEST_BUCKET_NAME)).thenReturn(false); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isNotFound()); } @Test void testDeleteBucket_Conflict() throws Exception { when(fileStore.getBucket(TEST_BUCKET_NAME)).thenReturn(TEST_BUCKET); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)) .thenReturn(Collections.singletonList(new S3Object())); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isConflict()); } @Test void testDeleteBucket_InternalServerError() throws Exception { 
when(fileStore.getBucket(TEST_BUCKET_NAME)).thenReturn(TEST_BUCKET); when(fileStore.getS3Objects(TEST_BUCKET_NAME, null)) .thenThrow(new IOException("THIS IS EXPECTED")); mockMvc.perform( delete("/testBucket") .accept(MediaType.APPLICATION_XML) .contentType(MediaType.APPLICATION_XML) ).andExpect(MockMvcResultMatchers.status().isInternalServerError()); } /** * Parameter factory. * Taken from ListObjectIT to make sure we unit test against the same data. */ public static Iterable<Param> data() { return Arrays.asList( param(null, null).keys(ALL_OBJECTS), param("", null).keys(ALL_OBJECTS), param(null, "").keys(ALL_OBJECTS), param(null, "/").keys("a", "b", "d:1", "d:1:1", "eor.txt") .prefixes("3330/", "foo/", "c/", "b/", "33309/"), param("", "").keys(ALL_OBJECTS), param("/", null), param("b", null).keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), param("b/", null).keys("b/1", "b/1/1", "b/1/2", "b/2"), param("b", "").keys("b", "b/1", "b/1/1", "b/1/2", "b/2"), param("b", "/").keys("b").prefixes("b/"), param("b/", "/").keys("b/1", "b/2").prefixes("b/1/"), param("b/1", "/").keys("b/1").prefixes("b/1/"), param("b/1/", "/").keys("b/1/1", "b/1/2"), param("c", "/").prefixes("c/"), param("c/", "/").keys("c/1").prefixes("c/1/"), param("eor", "/").keys("eor.txt") ); } @ParameterizedTest @MethodSource("data") public void testCommonPrefixesAndBucketContentFilter(final Param parameters) { String prefix = parameters.prefix; String delimiter = parameters.delimiter; List<BucketContents> bucketContents = createBucketContentsList(prefix); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); List<BucketContents> filteredBucketContents = filterBucketContentsBy(bucketContents, commonPrefixes); String[] expectedPrefixes = parameters.expectedPrefixes; String[] expectedKeys = parameters.expectedKeys; assertThat(commonPrefixes).hasSize(expectedPrefixes.length); assertThat(commonPrefixes) .as("Returned prefixes are correct") 
.containsExactlyInAnyOrderElementsOf(Arrays.asList(expectedPrefixes)); assertThat(filteredBucketContents.stream().map(BucketContents::getKey).collect(toList())) .as("Returned keys are correct") .containsExactlyInAnyOrderElementsOf(Arrays.asList(expectedKeys)); } @Test void testCommonPrefixesNoPrefixNoDelimiter() { String prefix = ""; String delimiter = ""; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(0); } @Test void testCommonPrefixesPrefixNoDelimiter() { String prefix = "prefixa"; String delimiter = ""; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(0); } @Test void testCommonPrefixesNoPrefixDelimiter() { String prefix = ""; String delimiter = "/"; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(5).contains("3330/", "foo/", "c/", "b/", "33309/"); } @Test void testCommonPrefixesPrefixDelimiter() { String prefix = "3330"; String delimiter = "/"; List<BucketContents> bucketContents = createBucketContentsList(); Set<String> commonPrefixes = collapseCommonPrefixes(prefix, delimiter, bucketContents); assertThat(commonPrefixes).hasSize(2).contains("3330/", "33309/"); } List<BucketContents> createBucketContentsList() { return createBucketContentsList(null); } List<BucketContents> createBucketContentsList(String prefix) { List<BucketContents> list = new ArrayList<>(); for (String object : ALL_OBJECTS) { if (StringUtils.isNotEmpty(prefix)) { if (!object.startsWith(prefix)) { continue; } } list.add(createBucketContents(object)); } return list; } BucketContents createBucketContents(String key) { String lastModified = "lastModified"; String etag = "etag"; 
String size = "size"; String storageClass = "storageClass"; Owner owner = new Owner(0L, "name"); return new BucketContents(key, lastModified, etag, size, storageClass, owner); } static class Param { final String prefix; final String delimiter; String[] expectedPrefixes = new String[0]; String[] expectedKeys = new String[0]; private Param(final String prefix, final String delimiter) { this.prefix = prefix; this.delimiter = delimiter; } Param prefixes(final String... expectedPrefixes) { this.expectedPrefixes = expectedPrefixes; return this; } Param keys(final String... expectedKeys) { this.expectedKeys = expectedKeys; return this; } @Override public String toString() { return String.format("prefix=%s, delimiter=%s", prefix, delimiter); } } static Param param(final String prefix, final String delimiter) { return new Param(prefix, delimiter); } }
Tests for ListObjectsInsideBucket
server/src/test/java/com/adobe/testing/s3mock/FileStoreControllerTest.java
Tests for ListObjectsInsideBucket
Java
apache-2.0
d852f14ca647634d84c0fc8ce8c1f6ccdd77c0e8
0
apache/pdfbox,kalaspuffar/pdfbox,apache/pdfbox,kalaspuffar/pdfbox
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdfparser; import static org.apache.pdfbox.util.Charsets.ISO_8859_1; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import java.util.Set; import java.util.TreeMap; import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pdfbox.cos.COSArray; import org.apache.pdfbox.cos.COSBase; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSDocument; import org.apache.pdfbox.cos.COSInputStream; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.cos.COSNull; import org.apache.pdfbox.cos.COSNumber; import org.apache.pdfbox.cos.COSObject; import org.apache.pdfbox.cos.COSObjectKey; import org.apache.pdfbox.cos.COSStream; import org.apache.pdfbox.io.RandomAccessRead; import org.apache.pdfbox.pdfparser.XrefTrailerResolver.XRefType; import org.apache.pdfbox.pdmodel.encryption.SecurityHandler; 
/** * PDF-Parser which first reads startxref and xref tables in order to know valid objects and parse only these objects. * * First {@link PDFParser#parse()} or {@link FDFParser#parse()} must be called before page objects * can be retrieved, e.g. {@link PDFParser#getPDDocument()}. * * This class is a much enhanced version of <code>QuickParser</code> presented in <a * href="https://issues.apache.org/jira/browse/PDFBOX-1104">PDFBOX-1104</a> by Jeremy Villalobos. */ public class COSParser extends BaseParser { private static final String PDF_HEADER = "%PDF-"; private static final String FDF_HEADER = "%FDF-"; private static final String PDF_DEFAULT_VERSION = "1.4"; private static final String FDF_DEFAULT_VERSION = "1.0"; private static final char[] XREF_TABLE = new char[] { 'x', 'r', 'e', 'f' }; private static final char[] XREF_STREAM = new char[] { '/', 'X', 'R', 'e', 'f' }; private static final char[] STARTXREF = new char[] { 's','t','a','r','t','x','r','e','f' }; private static final byte[] ENDSTREAM = new byte[] { E, N, D, S, T, R, E, A, M }; private static final byte[] ENDOBJ = new byte[] { E, N, D, O, B, J }; private static final long MINIMUM_SEARCH_OFFSET = 6; private static final int X = 'x'; private static final int STRMBUFLEN = 2048; private final byte[] strmBuf = new byte[ STRMBUFLEN ]; protected final RandomAccessRead source; /** * Only parse the PDF file minimally allowing access to basic information. */ public static final String SYSPROP_PARSEMINIMAL = "org.apache.pdfbox.pdfparser.nonSequentialPDFParser.parseMinimal"; /** * The range within the %%EOF marker will be searched. * Useful if there are additional characters after %%EOF within the PDF. */ public static final String SYSPROP_EOFLOOKUPRANGE = "org.apache.pdfbox.pdfparser.nonSequentialPDFParser.eofLookupRange"; /** * How many trailing bytes to read for EOF marker. */ private static final int DEFAULT_TRAIL_BYTECOUNT = 2048; /** * EOF-marker. 
*/ protected static final char[] EOF_MARKER = new char[] { '%', '%', 'E', 'O', 'F' }; /** * obj-marker. */ protected static final char[] OBJ_MARKER = new char[] { 'o', 'b', 'j' }; /** * ObjStream-marker. */ private static final char[] OBJ_STREAM = new char[] { '/', 'O', 'b', 'j', 'S', 't', 'm' }; private long trailerOffset; /** * file length. */ protected long fileLen; /** * is parser using auto healing capacity ? */ private boolean isLenient = true; protected boolean initialParseDone = false; /** * Contains all found objects of a brute force search. */ private Map<COSObjectKey, Long> bfSearchCOSObjectKeyOffsets = null; private Long lastEOFMarker = null; private List<Long> bfSearchXRefTablesOffsets = null; private List<Long> bfSearchXRefStreamsOffsets = null; /** * The security handler. */ protected SecurityHandler securityHandler = null; /** * how many trailing bytes to read for EOF marker. */ private int readTrailBytes = DEFAULT_TRAIL_BYTECOUNT; private static final Log LOG = LogFactory.getLog(COSParser.class); /** * Collects all Xref/trailer objects and resolves them into single * object using startxref reference. */ protected XrefTrailerResolver xrefTrailerResolver = new XrefTrailerResolver(); /** * The prefix for the temp file being used. */ public static final String TMP_FILE_PREFIX = "tmpPDF"; /** * Default constructor. */ public COSParser(RandomAccessRead source) { super(new RandomAccessSource(source)); this.source = source; } /** * Sets how many trailing bytes of PDF file are searched for EOF marker and 'startxref' marker. If not set we use * default value {@link #DEFAULT_TRAIL_BYTECOUNT}. * * <p>We check that new value is at least 16. 
However for practical use cases this value should not be lower than * 1000; even 2000 was found to not be enough in some cases where some trailing garbage like HTML snippets followed * the EOF marker.</p> * * <p> * In case system property {@link #SYSPROP_EOFLOOKUPRANGE} is defined this value will be set on initialization but * can be overwritten later. * </p> * * @param byteCount number of trailing bytes */ public void setEOFLookupRange(int byteCount) { if (byteCount > 15) { readTrailBytes = byteCount; } } /** * Read the trailer information and provide a COSDictionary containing the trailer information. * * @return a COSDictionary containing the trailer information * @throws IOException if something went wrong */ protected COSDictionary retrieveTrailer() throws IOException { COSDictionary trailer = null; boolean rebuildTrailer = false; try { // parse startxref // TODO FDF files don't have a startxref value, so that rebuildTrailer is triggered long startXRefOffset = getStartxrefOffset(); if (startXRefOffset > -1) { trailer = parseXref(startXRefOffset); } else { rebuildTrailer = isLenient(); } } catch (IOException exception) { if (isLenient()) { rebuildTrailer = true; } else { throw exception; } } // check if the trailer contains a Root object if (trailer != null && trailer.getItem(COSName.ROOT) == null) { rebuildTrailer = isLenient(); } if (rebuildTrailer) { trailer = rebuildTrailer(); } return trailer; } /** * Parses cross reference tables. 
 * @param startXRefOffset start offset of the first table
 * @return the trailer dictionary
 * @throws IOException if something went wrong
 */
private COSDictionary parseXref(long startXRefOffset) throws IOException
{
    source.seek(startXRefOffset);
    long startXrefOffset = Math.max(0, parseStartXref());
    // check the startxref offset; in lenient mode a brute-force search may correct it
    long fixedOffset = checkXRefOffset(startXrefOffset);
    if (fixedOffset > -1)
    {
        startXrefOffset = fixedOffset;
    }
    document.setStartXref(startXrefOffset);
    long prev = startXrefOffset;
    // ---- parse whole chain of xref tables/object streams using PREV reference
    // prevSet guards against documents whose /Prev entries form a cycle
    Set<Long> prevSet = new HashSet<>();
    while (prev > 0)
    {
        // seek to xref table
        source.seek(prev);
        // skip white spaces
        skipSpaces();
        // -- parse xref
        if (source.peek() == X)
        {
            // xref table and trailer
            // use existing parser to parse xref table
            parseXrefTable(prev);
            if (!parseTrailer())
            {
                throw new IOException("Expected trailer object at position: "
                        + source.getPosition());
            }
            COSDictionary trailer = xrefTrailerResolver.getCurrentTrailer();
            // check for a XRef stream, it may contain some object ids of compressed objects
            if (trailer.containsKey(COSName.XREF_STM))
            {
                int streamOffset = trailer.getInt(COSName.XREF_STM);
                // check the xref stream reference
                fixedOffset = checkXRefOffset(streamOffset);
                if (fixedOffset > -1 && fixedOffset != streamOffset)
                {
                    LOG.warn("/XRefStm offset " + streamOffset + " is incorrect, corrected to "
                            + fixedOffset);
                    streamOffset = (int) fixedOffset;
                    trailer.setInt(COSName.XREF_STM, streamOffset);
                }
                if (streamOffset > 0)
                {
                    source.seek(streamOffset);
                    skipSpaces();
                    try
                    {
                        // hybrid-reference file: parse the xref stream in addition to the table
                        parseXrefObjStream(prev, false);
                    }
                    catch (IOException ex)
                    {
                        if (isLenient)
                        {
                            LOG.error("Failed to parse /XRefStm at offset " + streamOffset, ex);
                        }
                        else
                        {
                            throw ex;
                        }
                    }
                }
                else
                {
                    if (isLenient)
                    {
                        LOG.error("Skipped XRef stream due to a corrupt offset:" + streamOffset);
                    }
                    else
                    {
                        throw new IOException(
                                "Skipped XRef stream due to a corrupt offset:" + streamOffset);
                    }
                }
            }
            prev = trailer.getLong(COSName.PREV);
            if (prev > 0)
            {
                // check the xref table reference
                fixedOffset = checkXRefOffset(prev);
                if (fixedOffset > -1 && fixedOffset != prev)
                {
                    prev = fixedOffset;
                    trailer.setLong(COSName.PREV, prev);
                }
            }
        }
        else
        {
            // parse xref stream
            prev = parseXrefObjStream(prev, true);
            if (prev > 0)
            {
                // check the xref table reference
                fixedOffset = checkXRefOffset(prev);
                if (fixedOffset > -1 && fixedOffset != prev)
                {
                    prev = fixedOffset;
                    COSDictionary trailer = xrefTrailerResolver.getCurrentTrailer();
                    trailer.setLong(COSName.PREV, prev);
                }
            }
        }
        if (prevSet.contains(prev))
        {
            throw new IOException("/Prev loop at offset " + prev);
        }
        prevSet.add(prev);
    }
    // ---- build valid xrefs out of the xref chain
    xrefTrailerResolver.setStartxref(startXrefOffset);
    COSDictionary trailer = xrefTrailerResolver.getTrailer();
    document.setTrailer(trailer);
    document.setIsXRefStream(XRefType.STREAM == xrefTrailerResolver.getXrefType());
    // check the offsets of all referenced objects
    checkXrefOffsets();
    // copy xref table
    document.addXRefTable(xrefTrailerResolver.getXrefTable());
    return trailer;
}

/**
 * Parses an xref object stream starting with indirect object id.
 *
 * @param objByteOffset offset of the object stream (passed on to the xref stream parser)
 * @param isStandalone whether the stream is the primary xref source (not a hybrid /XRefStm)
 * @return value of PREV item in dictionary or <code>-1</code> if no such item exists
 */
private long parseXrefObjStream(long objByteOffset, boolean isStandalone) throws IOException
{
    // ---- parse indirect object head
    readObjectNumber();
    readGenerationNumber();
    readExpectedString(OBJ_MARKER, true);
    COSDictionary dict = parseCOSDictionary();
    try (COSStream xrefStream = parseCOSStream(dict))
    {
        parseXrefStream(xrefStream, objByteOffset, isStandalone);
    }
    return dict.getLong(COSName.PREV);
}

/**
 * Looks for and parses startxref. We first look for last '%%EOF' marker (within last
 * {@link #DEFAULT_TRAIL_BYTECOUNT} bytes (or range set via {@link #setEOFLookupRange(int)}) and go back to find
 * <code>startxref</code>.
 *
 * @return the offset of StartXref
 * @throws IOException If something went wrong.
*/ private final long getStartxrefOffset() throws IOException { byte[] buf; long skipBytes; // read trailing bytes into buffer try { final int trailByteCount = (fileLen < readTrailBytes) ? (int) fileLen : readTrailBytes; buf = new byte[trailByteCount]; skipBytes = fileLen - trailByteCount; source.seek(skipBytes); int off = 0; int readBytes; while (off < trailByteCount) { readBytes = source.read(buf, off, trailByteCount - off); // in order to not get stuck in a loop we check readBytes (this should never happen) if (readBytes < 1) { throw new IOException( "No more bytes to read for trailing buffer, but expected: " + (trailByteCount - off)); } off += readBytes; } } finally { source.seek(0); } // find last '%%EOF' int bufOff = lastIndexOf(EOF_MARKER, buf, buf.length); if (bufOff < 0) { if (isLenient) { // in lenient mode the '%%EOF' isn't needed bufOff = buf.length; LOG.debug("Missing end of file marker '" + new String(EOF_MARKER) + "'"); } else { throw new IOException("Missing end of file marker '" + new String(EOF_MARKER) + "'"); } } // find last startxref preceding EOF marker bufOff = lastIndexOf(STARTXREF, buf, bufOff); if (bufOff < 0) { throw new IOException("Missing 'startxref' marker."); } else { return skipBytes + bufOff; } } /** * Searches last appearance of pattern within buffer. Lookup before _lastOff and goes back until 0. 
* * @param pattern pattern to search for * @param buf buffer to search pattern in * @param endOff offset (exclusive) where lookup starts at * * @return start offset of pattern within buffer or <code>-1</code> if pattern could not be found */ protected int lastIndexOf(final char[] pattern, final byte[] buf, final int endOff) { final int lastPatternChOff = pattern.length - 1; int bufOff = endOff; int patOff = lastPatternChOff; char lookupCh = pattern[patOff]; while (--bufOff >= 0) { if (buf[bufOff] == lookupCh) { if (--patOff < 0) { // whole pattern matched return bufOff; } // matched current char, advance to preceding one lookupCh = pattern[patOff]; } else if (patOff < lastPatternChOff) { // no char match but already matched some chars; reset patOff = lastPatternChOff; lookupCh = pattern[patOff]; } } return -1; } /** * Return true if parser is lenient. Meaning auto healing capacity of the parser are used. * * @return true if parser is lenient */ public boolean isLenient() { return isLenient; } /** * Change the parser leniency flag. * * This method can only be called before the parsing of the file. * * @param lenient try to handle malformed PDFs. * */ public void setLenient(boolean lenient) { if (initialParseDone) { throw new IllegalArgumentException("Cannot change leniency after parsing"); } this.isLenient = lenient; } /** * Creates a unique object id using object number and object generation * number. (requires object number &lt; 2^31)) */ private long getObjectId(final COSObject obj) { return obj.getObjectNumber() << 32 | obj.getGenerationNumber(); } /** * Adds all from newObjects to toBeParsedList if it is not an COSObject or * we didn't add this COSObject already (checked via addedObjects). 
 */
private void addNewToList(final Queue<COSBase> toBeParsedList, final Collection<COSBase> newObjects, final Set<Long> addedObjects)
{
    for (COSBase newObject : newObjects)
    {
        addNewToList(toBeParsedList, newObject, addedObjects);
    }
}

/**
 * Adds newObject to toBeParsedList if it is not an COSObject or we didn't
 * add this COSObject already (checked via addedObjects).
 */
private void addNewToList(final Queue<COSBase> toBeParsedList, final COSBase newObject, final Set<Long> addedObjects)
{
    if (newObject instanceof COSObject)
    {
        final long objId = getObjectId((COSObject) newObject);
        // skip references that were already queued once
        if (!addedObjects.add(objId))
        {
            return;
        }
    }
    toBeParsedList.add(newObject);
}

/**
 * Will parse every object necessary to load a single page from the pdf document. We try our
 * best to order objects according to offset in file before reading to minimize seek operations.
 *
 * @param dict the COSObject from the parent pages.
 * @param excludeObjects dictionary object reference entries with these names will not be parsed
 *
 * @throws IOException if something went wrong
 */
protected void parseDictObjects(COSDictionary dict, COSName... excludeObjects) throws IOException
{
    // ---- create queue for objects waiting for further parsing
    final Queue<COSBase> toBeParsedList = new LinkedList<>();
    // offset ordered object map
    final TreeMap<Long, List<COSObject>> objToBeParsed = new TreeMap<>();
    // in case of compressed objects offset points to stmObj
    final Set<Long> parsedObjects = new HashSet<>();
    final Set<Long> addedObjects = new HashSet<>();
    addExcludedToList(excludeObjects, dict, parsedObjects);
    addNewToList(toBeParsedList, dict.getValues(), addedObjects);
    // ---- go through objects to be parsed
    while (!(toBeParsedList.isEmpty() && objToBeParsed.isEmpty()))
    {
        // -- first get all COSObject from other kind of objects and
        // put them in objToBeParsed; afterwards toBeParsedList is empty
        COSBase baseObj;
        while ((baseObj = toBeParsedList.poll()) != null)
        {
            if (baseObj instanceof COSDictionary)
            {
                addNewToList(toBeParsedList, ((COSDictionary) baseObj).getValues(), addedObjects);
            }
            else if (baseObj instanceof COSArray)
            {
                for (COSBase cosBase : ((COSArray) baseObj))
                {
                    addNewToList(toBeParsedList, cosBase, addedObjects);
                }
            }
            else if (baseObj instanceof COSObject)
            {
                COSObject obj = (COSObject) baseObj;
                long objId = getObjectId(obj);
                COSObjectKey objKey = new COSObjectKey(obj.getObjectNumber(), obj.getGenerationNumber());
                if (!parsedObjects.contains(objId))
                {
                    Long fileOffset = document.getXrefTable().get(objKey);
                    // offset unknown: in lenient mode fall back to brute-force scan results
                    if (fileOffset == null && isLenient)
                    {
                        Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets();
                        fileOffset = bfCOSObjectKeyOffsets.get(objKey);
                        if (fileOffset != null)
                        {
                            LOG.debug("Set missing " + fileOffset + " for object " + objKey);
                            document.getXrefTable().put(objKey, fileOffset);
                        }
                    }
                    // it is allowed that object references point to null, thus we have to test
                    if (fileOffset != null && fileOffset != 0)
                    {
                        if (fileOffset > 0)
                        {
                            objToBeParsed.put(fileOffset, Collections.singletonList(obj));
                        }
                        else
                        {
                            // negative offset means we have a compressed
                            // object within object stream;
                            // get offset of object stream
                            COSObjectKey key = new COSObjectKey((int) -fileOffset, 0);
                            fileOffset = document.getXrefTable().get(key);
                            if ((fileOffset == null) || (fileOffset <= 0))
                            {
                                if (isLenient)
                                {
                                    Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets();
                                    fileOffset = bfCOSObjectKeyOffsets.get(key);
                                    if (fileOffset != null)
                                    {
                                        LOG.debug("Set missing " + fileOffset + " for object " + key);
                                        document.getXrefTable().put(key, fileOffset);
                                    }
                                }
                                else
                                {
                                    throw new IOException(
                                            "Invalid object stream xref object reference for key '"
                                                    + objKey + "': " + fileOffset);
                                }
                            }
                            List<COSObject> stmObjects = objToBeParsed.get(fileOffset);
                            if (stmObjects == null)
                            {
                                stmObjects = new ArrayList<>();
                                objToBeParsed.put(fileOffset, stmObjects);
                            }
                            // java does not have a test for immutable
                            // (a singletonList at this offset would mean a plain file object
                            // and a compressed object claim the same offset)
                            else if (!(stmObjects instanceof ArrayList))
                            {
                                throw new IOException(obj + " cannot be assigned to offset "
                                        + fileOffset + ", this belongs to " + stmObjects.get(0));
                            }
                            stmObjects.add(obj);
                        }
                    }
                    else
                    {
                        // NULL object
                        COSObject pdfObject = document.getObjectFromPool(objKey);
                        pdfObject.setObject(COSNull.NULL);
                    }
                }
            }
        }
        // ---- read first COSObject with smallest offset
        // resulting object will be added to toBeParsedList
        if (objToBeParsed.isEmpty())
        {
            break;
        }
        for (COSObject obj : objToBeParsed.remove(objToBeParsed.firstKey()))
        {
            COSBase parsedObj = parseObjectDynamically(obj, false);
            if (parsedObj != null)
            {
                obj.setObject(parsedObj);
                addNewToList(toBeParsedList, parsedObj, addedObjects);
                parsedObjects.add(getObjectId(obj));
            }
        }
    }
}

// add objects not to be parsed to list of already parsed objects
private void addExcludedToList(COSName[] excludeObjects, COSDictionary dict, final Set<Long> parsedObjects)
{
    if (excludeObjects != null)
    {
        for (COSName objName : excludeObjects)
        {
            COSBase baseObj = dict.getItem(objName);
            if (baseObj instanceof COSObject)
            {
                parsedObjects.add(getObjectId((COSObject) baseObj));
            }
        }
    }
}

/**
 * This will parse the next object from the stream and add it to the local state.
* * @param obj object to be parsed (we only take object number and generation number for lookup start offset) * @param requireExistingNotCompressedObj if <code>true</code> object to be parsed must not be contained within * compressed stream * @return the parsed object (which is also added to document object) * * @throws IOException If an IO error occurs. */ protected final COSBase parseObjectDynamically(COSObject obj, boolean requireExistingNotCompressedObj) throws IOException { return parseObjectDynamically(obj.getObjectNumber(), obj.getGenerationNumber(), requireExistingNotCompressedObj); } /** * This will parse the next object from the stream and add it to the local state. * It's reduced to parsing an indirect object. * * @param objNr object number of object to be parsed * @param objGenNr object generation number of object to be parsed * @param requireExistingNotCompressedObj if <code>true</code> the object to be parsed must be defined in xref * (comment: null objects may be missing from xref) and it must not be a compressed object within object stream * (this is used to circumvent being stuck in a loop in a malicious PDF) * * @return the parsed object (which is also added to document object) * * @throws IOException If an IO error occurs. 
 */
protected COSBase parseObjectDynamically(long objNr, int objGenNr,
        boolean requireExistingNotCompressedObj) throws IOException
{
    // ---- create object key and get object (container) from pool
    final COSObjectKey objKey = new COSObjectKey(objNr, objGenNr);
    final COSObject pdfObject = document.getObjectFromPool(objKey);
    if (pdfObject.getObject() == null)
    {
        // not previously parsed
        // ---- read offset or object stream object number from xref table
        Long offsetOrObjstmObNr = document.getXrefTable().get(objKey);
        // maybe something is wrong with the xref table -> perform brute force search for all objects
        if (offsetOrObjstmObNr == null && isLenient)
        {
            Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets();
            offsetOrObjstmObNr = bfCOSObjectKeyOffsets.get(objKey);
            if (offsetOrObjstmObNr != null)
            {
                LOG.debug("Set missing offset " + offsetOrObjstmObNr + " for object " + objKey);
                document.getXrefTable().put(objKey, offsetOrObjstmObNr);
            }
        }
        // sanity test to circumvent loops with broken documents
        if (requireExistingNotCompressedObj
                && ((offsetOrObjstmObNr == null) || (offsetOrObjstmObNr <= 0)))
        {
            throw new IOException("Object must be defined and must not be compressed object: "
                    + objKey.getNumber() + ":" + objKey.getGeneration());
        }
        if (offsetOrObjstmObNr == null)
        {
            // not defined object -> NULL object (Spec. 1.7, chap. 3.2.9)
            pdfObject.setObject(COSNull.NULL);
        }
        else if (offsetOrObjstmObNr > 0)
        {
            // offset of indirect object in file
            parseFileObject(offsetOrObjstmObNr, objKey, pdfObject);
        }
        else
        {
            // xref value is object nr of object stream containing object to be parsed
            // since our object was not found it means object stream was not parsed so far
            parseObjectStream((int) -offsetOrObjstmObNr);
        }
    }
    return pdfObject.getObject();
}

/**
 * Parses an indirect object stored as a plain (uncompressed) file object at the given offset
 * and stores the result in the given pool object.
 *
 * @param offsetOrObjstmObNr absolute file offset of the object start
 * @param objKey expected object number/generation (consistency-checked against the file)
 * @param pdfObject pool container that receives the parsed object
 * @throws IOException if the object at the offset does not match the key or is malformed
 */
private void parseFileObject(Long offsetOrObjstmObNr, final COSObjectKey objKey,
        final COSObject pdfObject) throws IOException
{
    // ---- go to object start
    source.seek(offsetOrObjstmObNr);
    // ---- we must have an indirect object
    final long readObjNr = readObjectNumber();
    final int readObjGen = readGenerationNumber();
    readExpectedString(OBJ_MARKER, true);
    // ---- consistency check
    if ((readObjNr != objKey.getNumber()) || (readObjGen != objKey.getGeneration()))
    {
        throw new IOException("XREF for " + objKey.getNumber() + ":" + objKey.getGeneration()
                + " points to wrong object: " + readObjNr + ":" + readObjGen + " at offset "
                + offsetOrObjstmObNr);
    }
    skipSpaces();
    COSBase pb = parseDirObject();
    String endObjectKey = readString();
    if (endObjectKey.equals(STREAM_STRING))
    {
        // unread the 'stream' keyword, parseCOSStream re-reads it
        source.rewind(endObjectKey.getBytes(ISO_8859_1).length);
        if (pb instanceof COSDictionary)
        {
            COSStream stream = parseCOSStream((COSDictionary) pb);
            if (securityHandler != null)
            {
                securityHandler.decryptStream(stream, objKey.getNumber(), objKey.getGeneration());
            }
            pb = stream;
        }
        else
        {
            // this is not legal
            // the combination of a dict and the stream/endstream
            // forms a complete stream object
            throw new IOException("Stream not preceded by dictionary (offset: "
                    + offsetOrObjstmObNr + ").");
        }
        skipSpaces();
        endObjectKey = readLine();
        // we have case with a second 'endstream' before endobj
        if (!endObjectKey.startsWith(ENDOBJ_STRING) && endObjectKey.startsWith(ENDSTREAM_STRING))
        {
            endObjectKey = endObjectKey.substring(9).trim();
            if (endObjectKey.length() == 0)
            {
                // no other characters in extra endstream line
                // read next line
                endObjectKey = readLine();
            }
        }
    }
    else if (securityHandler != null)
    {
        securityHandler.decrypt(pb, objKey.getNumber(), objKey.getGeneration());
    }
    pdfObject.setObject(pb);
    if (!endObjectKey.startsWith(ENDOBJ_STRING))
    {
        // missing 'endobj' is tolerated in lenient mode only
        if (isLenient)
        {
            LOG.warn("Object (" + readObjNr + ":" + readObjGen + ") at offset "
                    + offsetOrObjstmObNr + " does not end with 'endobj' but with '"
                    + endObjectKey + "'");
        }
        else
        {
            throw new IOException("Object (" + readObjNr + ":" + readObjGen + ") at offset "
                    + offsetOrObjstmObNr + " does not end with 'endobj' but with '"
                    + endObjectKey + "'");
        }
    }
}

/**
 * Parses the object stream with the given object number and registers all contained objects
 * that the xref table maps to this stream.
 *
 * @param objstmObjNr object number of the object stream
 * @throws IOException if the stream could not be parsed (non-lenient mode)
 */
private void parseObjectStream(int objstmObjNr) throws IOException
{
    final COSBase objstmBaseObj = parseObjectDynamically(objstmObjNr, 0, true);
    if (objstmBaseObj instanceof COSStream)
    {
        // parse object stream
        PDFObjectStreamParser parser;
        try
        {
            parser = new PDFObjectStreamParser((COSStream) objstmBaseObj, document);
        }
        catch (IOException ex)
        {
            if (isLenient)
            {
                LOG.error("object stream " + objstmObjNr
                        + " could not be parsed due to an exception", ex);
                return;
            }
            else
            {
                throw ex;
            }
        }
        try
        {
            parser.parse();
        }
        catch (IOException exception)
        {
            if (isLenient)
            {
                LOG.debug("Stop reading object stream " + objstmObjNr
                        + " due to an exception", exception);
                // the error is handled in parseDictObjects
                return;
            }
            else
            {
                throw exception;
            }
        }
        // register all objects which are referenced to be contained in object stream
        for (COSObject next : parser.getObjects())
        {
            COSObjectKey stmObjKey = new COSObjectKey(next);
            Long offset = document.getXrefTable().get(stmObjKey);
            // only accept objects whose xref entry points back to this very stream
            if (offset != null && offset == -objstmObjNr)
            {
                COSObject stmObj = document.getObjectFromPool(stmObjKey);
                stmObj.setObject(next.getObject());
            }
        }
    }
}

/**
 * Returns length value referred to or defined in given object.
*/ private COSNumber getLength(final COSBase lengthBaseObj, final COSName streamType) throws IOException { if (lengthBaseObj == null) { return null; } COSNumber retVal = null; // maybe length was given directly if (lengthBaseObj instanceof COSNumber) { retVal = (COSNumber) lengthBaseObj; } // length in referenced object else if (lengthBaseObj instanceof COSObject) { COSObject lengthObj = (COSObject) lengthBaseObj; if (lengthObj.getObject() == null) { // not read so far, keep current stream position final long curFileOffset = source.getPosition(); boolean isObjectStream = COSName.OBJ_STM.equals(streamType); parseObjectDynamically(lengthObj, isObjectStream); // reset current stream position source.seek(curFileOffset); if (lengthObj.getObject() == null) { throw new IOException("Length object content was not read."); } } if (!(lengthObj.getObject() instanceof COSNumber)) { throw new IOException("Wrong type of referenced length object " + lengthObj + ": " + lengthObj.getObject().getClass().getSimpleName()); } retVal = (COSNumber) lengthObj.getObject(); } else { throw new IOException("Wrong type of length object: " + lengthBaseObj.getClass().getSimpleName()); } return retVal; } private static final int STREAMCOPYBUFLEN = 8192; private final byte[] streamCopyBuf = new byte[STREAMCOPYBUFLEN]; /** * This will read a COSStream from the input stream using length attribute within dictionary. If * length attribute is a indirect reference it is first resolved to get the stream length. This * means we copy stream data without testing for 'endstream' or 'endobj' and thus it is no * problem if these keywords occur within stream. We require 'endstream' to be found after * stream data is read. * * @param dic dictionary that goes with this stream. * * @return parsed pdf stream. * * @throws IOException if an error occurred reading the stream, like problems with reading * length attribute, stream does not end with 'endstream' after data read, stream too short etc. 
*/ protected COSStream parseCOSStream(COSDictionary dic) throws IOException { COSStream stream = document.createCOSStream(dic); // read 'stream'; this was already tested in parseObjectsDynamically() readString(); skipWhiteSpaces(); /* * This needs to be dic.getItem because when we are parsing, the underlying object might still be null. */ COSNumber streamLengthObj = getLength(dic.getItem(COSName.LENGTH), dic.getCOSName(COSName.TYPE)); if (streamLengthObj == null) { if (isLenient) { LOG.warn("The stream doesn't provide any stream length, using fallback readUntilEnd, at offset " + source.getPosition()); } else { throw new IOException("Missing length for stream."); } } // get output stream to copy data to try (OutputStream out = stream.createRawOutputStream()) { if (streamLengthObj != null && validateStreamLength(streamLengthObj.longValue())) { readValidStream(out, streamLengthObj); } else { readUntilEndStream(new EndstreamOutputStream(out)); } } String endStream = readString(); if (endStream.equals("endobj") && isLenient) { LOG.warn("stream ends with 'endobj' instead of 'endstream' at offset " + source.getPosition()); // avoid follow-up warning about missing endobj source.rewind(ENDOBJ.length); } else if (endStream.length() > 9 && isLenient && endStream.substring(0,9).equals(ENDSTREAM_STRING)) { LOG.warn("stream ends with '" + endStream + "' instead of 'endstream' at offset " + source.getPosition()); // unread the "extra" bytes source.rewind(endStream.substring(9).getBytes(ISO_8859_1).length); } else if (!endStream.equals(ENDSTREAM_STRING)) { throw new IOException( "Error reading stream, expected='endstream' actual='" + endStream + "' at offset " + source.getPosition()); } return stream; } /** * This method will read through the current stream object until * we find the keyword "endstream" meaning we're at the end of this * object. 
 * Some pdf files, however, forget to write some endstream tags
 * and just close off objects with an "endobj" tag so we have to handle
 * this case as well.
 *
 * This method is optimized using buffered IO and reduced number of
 * byte compare operations.
 *
 * @param out stream we write out to.
 *
 * @throws IOException if something went wrong
 */
private void readUntilEndStream(final OutputStream out) throws IOException
{
    int bufSize;
    int charMatchCount = 0;
    byte[] keyw = ENDSTREAM;
    // last character position of shortest keyword ('endobj')
    final int quickTestOffset = 5;
    // read next chunk into buffer; already matched chars are added to beginning of buffer
    while ((bufSize = source.read(strmBuf, charMatchCount, STRMBUFLEN - charMatchCount)) > 0)
    {
        bufSize += charMatchCount;
        int bIdx = charMatchCount;
        int quickTestIdx;
        // iterate over buffer, trying to find keyword match
        for (int maxQuicktestIdx = bufSize - quickTestOffset; bIdx < bufSize; bIdx++)
        {
            // reduce compare operations by first test last character we would have to
            // match if current one matches; if it is not a character from keywords
            // we can move behind the test character; this shortcut is inspired by the
            // Boyer-Moore string search algorithm and can reduce parsing time by approx. 20%
            quickTestIdx = bIdx + quickTestOffset;
            if (charMatchCount == 0 && quickTestIdx < maxQuicktestIdx)
            {
                final byte ch = strmBuf[quickTestIdx];
                if ((ch > 't') || (ch < 'a'))
                {
                    // last character we would have to match if current character would match
                    // is not a character from keywords -> jump behind and start over
                    bIdx = quickTestIdx;
                    continue;
                }
            }
            // could be negative - but we only compare to ASCII
            final byte ch = strmBuf[bIdx];
            if (ch == keyw[charMatchCount])
            {
                if (++charMatchCount == keyw.length)
                {
                    // match found
                    bIdx++;
                    break;
                }
            }
            else
            {
                if ((charMatchCount == 3) && (ch == ENDOBJ[charMatchCount]))
                {
                    // maybe ENDSTREAM is missing but we could have ENDOBJ
                    // ('end' is a shared prefix of both keywords)
                    keyw = ENDOBJ;
                    charMatchCount++;
                }
                else
                {
                    // no match; incrementing match start by 1 would be dumb since we already know
                    // matched chars depending on current char read we may already have beginning
                    // of a new match: 'e': first char matched; 'n': if we are at match position
                    // idx 7 we already read 'e' thus 2 chars matched for each other char we have
                    // to start matching first keyword char beginning with next read position
                    charMatchCount = (ch == E) ? 1 : ((ch == N) && (charMatchCount == 7)) ? 2 : 0;
                    // search again for 'endstream'
                    keyw = ENDSTREAM;
                }
            }
        }
        int contentBytes = Math.max(0, bIdx - charMatchCount);
        // write buffer content until first matched char to output stream
        if (contentBytes > 0)
        {
            out.write(strmBuf, 0, contentBytes);
        }
        if (charMatchCount == keyw.length)
        {
            // keyword matched; unread matched keyword (endstream/endobj) and following buffered content
            source.rewind(bufSize - contentBytes);
            break;
        }
        else
        {
            // copy matched chars at start of buffer
            System.arraycopy(keyw, 0, strmBuf, 0, charMatchCount);
        }
    }
    // this writes a lonely CR or drops trailing CR LF and LF
    out.flush();
}

/**
 * Copies exactly the declared number of stream bytes from the source to the output.
 * Only called after {@code validateStreamLength} accepted the /Length value.
 *
 * @param out destination for the raw stream bytes
 * @param streamLengthObj validated /Length value
 * @throws IOException if the source runs out of bytes unexpectedly
 */
private void readValidStream(OutputStream out, COSNumber streamLengthObj) throws IOException
{
    long remainBytes = streamLengthObj.longValue();
    while (remainBytes > 0)
    {
        final int chunk = (remainBytes > STREAMCOPYBUFLEN) ? STREAMCOPYBUFLEN : (int) remainBytes;
        final int readBytes = source.read(streamCopyBuf, 0, chunk);
        if (readBytes <= 0)
        {
            // shouldn't happen, the stream length has already been validated
            throw new IOException("read error at offset " + source.getPosition()
                    + ": expected " + chunk + " bytes, but read() returns " + readBytes);
        }
        out.write(streamCopyBuf, 0, readBytes);
        remainBytes -= readBytes;
    }
}

/**
 * Checks whether the declared stream length is plausible: within the file and followed
 * by an 'endstream' keyword. Restores the parse position before returning.
 *
 * @param streamLength declared /Length value
 * @return true if the declared length can be trusted
 * @throws IOException if seeking/reading fails
 */
private boolean validateStreamLength(long streamLength) throws IOException
{
    boolean streamLengthIsValid = true;
    long originOffset = source.getPosition();
    long expectedEndOfStream = originOffset + streamLength;
    if (expectedEndOfStream > fileLen)
    {
        streamLengthIsValid = false;
        LOG.warn("The end of the stream is out of range, using workaround to read the stream, "
                + "stream start position: " + originOffset + ", length: " + streamLength
                + ", expected end position: " + expectedEndOfStream);
    }
    else
    {
        source.seek(expectedEndOfStream);
        skipSpaces();
        if (!isString(ENDSTREAM))
        {
            streamLengthIsValid = false;
            LOG.warn("The end of the stream doesn't point to the correct offset, using workaround to read the stream, "
                    + "stream start position: " + originOffset + ", length: " + streamLength
                    + ", expected end position: " + expectedEndOfStream);
        }
        source.seek(originOffset);
    }
    return streamLengthIsValid;
}

/**
 * Check if the cross reference table/stream can be found at the current offset.
 *
 * @param startXRefOffset
 * @return the revised offset
 * @throws IOException
 */
private long checkXRefOffset(long startXRefOffset) throws IOException
{
    // repair mode isn't available in non-lenient mode
    if (!isLenient)
    {
        return startXRefOffset;
    }
    source.seek(startXRefOffset);
    skipSpaces();
    if (source.peek() == X && isString(XREF_TABLE))
    {
        return startXRefOffset;
    }
    if (startXRefOffset > 0)
    {
        if (checkXRefStreamOffset(startXRefOffset))
        {
            return startXRefOffset;
        }
        else
        {
            return calculateXRefFixedOffset(startXRefOffset);
        }
    }
    // can't find a valid offset
    return -1;
}

/**
 * Check if the cross reference stream can be found at the current offset.
 *
 * @param startXRefOffset the expected start offset of the XRef stream
 * @return the revised offset
 * @throws IOException if something went wrong
 */
private boolean checkXRefStreamOffset(long startXRefOffset) throws IOException
{
    // repair mode isn't available in non-lenient mode
    if (!isLenient || startXRefOffset == 0)
    {
        return true;
    }
    // seek to offset-1
    source.seek(startXRefOffset - 1);
    int nextValue = source.read();
    // the first character has to be a whitespace, and then a digit
    if (isWhitespace(nextValue))
    {
        skipSpaces();
        if (isDigit())
        {
            try
            {
                // it's a XRef stream
                readObjectNumber();
                readGenerationNumber();
                readExpectedString(OBJ_MARKER, true);
                // check the dictionary to avoid false positives
                COSDictionary dict = parseCOSDictionary();
                source.seek(startXRefOffset);
                if ("XRef".equals(dict.getNameAsString(COSName.TYPE)))
                {
                    return true;
                }
            }
            catch (IOException exception)
            {
                // there wasn't an object of a xref stream
                source.seek(startXRefOffset);
            }
        }
    }
    return false;
}

/**
 * Try to find a fixed offset for the given xref table/stream.
* * @param objectOffset the given offset where to look at * @return the fixed offset * * @throws IOException if something went wrong */ private long calculateXRefFixedOffset(long objectOffset) throws IOException { if (objectOffset < 0) { LOG.error("Invalid object offset " + objectOffset + " when searching for a xref table/stream"); return 0; } // start a brute force search for all xref tables and try to find the offset we are looking for long newOffset = bfSearchForXRef(objectOffset); if (newOffset > -1) { LOG.debug("Fixed reference for xref table/stream " + objectOffset + " -> " + newOffset); return newOffset; } LOG.error("Can't find the object xref table/stream at offset " + objectOffset); return 0; } private boolean validateXrefOffsets(Map<COSObjectKey, Long> xrefOffset) throws IOException { if (xrefOffset == null) { return true; } for (Entry<COSObjectKey, Long> objectEntry : xrefOffset.entrySet()) { COSObjectKey objectKey = objectEntry.getKey(); Long objectOffset = objectEntry.getValue(); // a negative offset number represents a object number itself // see type 2 entry in xref stream if (objectOffset != null && objectOffset >= 0 && !checkObjectKeys(objectKey, objectOffset)) { LOG.debug("Stop checking xref offsets as at least one (" + objectKey + ") couldn't be dereferenced"); return false; } } return true; } /** * Check the XRef table by dereferencing all objects and fixing the offset if necessary. * * @throws IOException if something went wrong. 
*/ private void checkXrefOffsets() throws IOException { // repair mode isn't available in non-lenient mode if (!isLenient) { return; } Map<COSObjectKey, Long> xrefOffset = xrefTrailerResolver.getXrefTable(); if (!validateXrefOffsets(xrefOffset)) { Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets(); if (!bfCOSObjectKeyOffsets.isEmpty()) { List<COSObjectKey> objStreams = new ArrayList<>(); // find all object streams for (Entry<COSObjectKey, Long> entry : xrefOffset.entrySet()) { Long offset = entry.getValue(); if (offset != null && offset < 0) { COSObjectKey objStream = new COSObjectKey(-offset, 0); if (!objStreams.contains(objStream)) { objStreams.add(new COSObjectKey(-offset, 0)); } } } // remove all found object streams if (!objStreams.isEmpty()) { for (COSObjectKey key : objStreams) { if (bfCOSObjectKeyOffsets.containsKey(key)) { // remove all parsed objects which are part of an object stream Set<Long> objects = xrefTrailerResolver .getContainedObjectNumbers((int) (key.getNumber())); for (Long objNr : objects) { COSObjectKey streamObjectKey = new COSObjectKey(objNr, 0); Long streamObjectOffset = bfCOSObjectKeyOffsets .get(streamObjectKey); if (streamObjectOffset != null && streamObjectOffset > 0) { bfCOSObjectKeyOffsets.remove(streamObjectKey); } } } else { // remove all objects which are part of an object stream which wasn't found Set<Long> objects = xrefTrailerResolver .getContainedObjectNumbers((int) (key.getNumber())); for (Long objNr : objects) { xrefOffset.remove(new COSObjectKey(objNr, 0)); } } } } LOG.debug("Replaced read xref table with the results of a brute force search"); xrefOffset.putAll(bfCOSObjectKeyOffsets); } } } /** * Check if the given object can be found at the given offset. 
* * @param objectKey the object we are looking for * @param offset the offset where to look * @return returns true if the given object can be dereferenced at the given offset * @throws IOException if something went wrong */ private boolean checkObjectKeys(COSObjectKey objectKey, long offset) throws IOException { // there can't be any object at the very beginning of a pdf if (offset < MINIMUM_SEARCH_OFFSET) { return false; } long objectNr = objectKey.getNumber(); int objectGen = objectKey.getGeneration(); long originOffset = source.getPosition(); String objectString = createObjectString(objectNr, objectGen); try { source.seek(offset); if (isString(objectString.getBytes(ISO_8859_1))) { // everything is ok, return origin object key source.seek(originOffset); return true; } } catch (IOException exception) { // Swallow the exception, obviously there isn't any valid object number } finally { source.seek(originOffset); } // no valid object number found return false; } /** * Create a string for the given object id. * * @param objectID the object id * @param genID the generation id * @return the generated string */ private String createObjectString(long objectID, int genID) { return Long.toString(objectID) + " " + Integer.toString(genID) + " obj"; } private Map<COSObjectKey, Long> getBFCOSObjectOffsets() throws IOException { if (bfSearchCOSObjectKeyOffsets == null) { bfSearchForObjects(); } return bfSearchCOSObjectKeyOffsets; } /** * Brute force search for every object in the pdf. 
 *
 * @throws IOException if something went wrong
 */
private void bfSearchForObjects() throws IOException
{
    bfSearchForLastEOFMarker();
    bfSearchCOSObjectKeyOffsets = new HashMap<>();
    long originOffset = source.getPosition();
    long currentOffset = MINIMUM_SEARCH_OFFSET;
    long lastObjectId = Long.MIN_VALUE;
    int lastGenID = Integer.MIN_VALUE;
    long lastObjOffset = Long.MIN_VALUE;
    char[] objString = " obj".toCharArray();
    char[] endobjString = "endobj".toCharArray();
    boolean endobjFound = false;
    do
    {
        source.seek(currentOffset);
        if (isString(objString))
        {
            // found ' obj'; walk backwards to read '<objNr> <genNr>' preceding it
            long tempOffset = currentOffset - 1;
            source.seek(tempOffset);
            int genID = source.peek();
            // is the next char a digit?
            if (isDigit(genID))
            {
                genID -= 48;
                tempOffset--;
                source.seek(tempOffset);
                if (isSpace())
                {
                    while (tempOffset > MINIMUM_SEARCH_OFFSET && isSpace())
                    {
                        source.seek(--tempOffset);
                    }
                    boolean objectIDFound = false;
                    while (tempOffset > MINIMUM_SEARCH_OFFSET && isDigit())
                    {
                        source.seek(--tempOffset);
                        objectIDFound = true;
                    }
                    if (objectIDFound)
                    {
                        source.read();
                        long objectId = readObjectNumber();
                        if (lastObjOffset > 0)
                        {
                            // add the former object ID only if there was a subsequent object ID
                            bfSearchCOSObjectKeyOffsets.put(
                                    new COSObjectKey(lastObjectId, lastGenID), lastObjOffset);
                        }
                        lastObjectId = objectId;
                        lastGenID = genID;
                        lastObjOffset = tempOffset + 1;
                        currentOffset += objString.length - 1;
                        endobjFound = false;
                    }
                }
            }
        }
        else if (isString(endobjString))
        {
            endobjFound = true;
            currentOffset += endobjString.length - 1;
        }
        currentOffset++;
    } while (currentOffset < lastEOFMarker && !source.isEOF());
    if ((lastEOFMarker < Long.MAX_VALUE || endobjFound) && lastObjOffset > 0)
    {
        // if the pdf wasn't cut off in the middle or if the last object ends with a "endobj" marker
        // the last object id has to be added here so that it can't get lost as there isn't any subsequent object id
        bfSearchCOSObjectKeyOffsets.put(new COSObjectKey(lastObjectId, lastGenID), lastObjOffset);
    }
    bfSearchForObjStreams();
    // reestablish origin position
    source.seek(originOffset);
}

/**
 * Search for the offset of the given xref table/stream among those found by a brute force search.
 *
 * @return the offset of the xref entry
 * @throws IOException if something went wrong
 */
private long bfSearchForXRef(long xrefOffset) throws IOException
{
    long newOffset = -1;
    long newOffsetTable = -1;
    long newOffsetStream = -1;
    bfSearchForXRefTables();
    bfSearchForXRefStreams();
    if (bfSearchXRefTablesOffsets != null)
    {
        // TODO to be optimized, this won't work in every case
        newOffsetTable = searchNearestValue(bfSearchXRefTablesOffsets, xrefOffset);
    }
    if (bfSearchXRefStreamsOffsets != null)
    {
        // TODO to be optimized, this won't work in every case
        newOffsetStream = searchNearestValue(bfSearchXRefStreamsOffsets, xrefOffset);
    }
    // choose the nearest value
    if (newOffsetTable > -1 && newOffsetStream > -1)
    {
        long differenceTable = xrefOffset - newOffsetTable;
        long differenceStream = xrefOffset - newOffsetStream;
        if (Math.abs(differenceTable) > Math.abs(differenceStream))
        {
            newOffset = newOffsetStream;
            // consume the candidate so it is not proposed again
            bfSearchXRefStreamsOffsets.remove(newOffsetStream);
        }
        else
        {
            newOffset = newOffsetTable;
            bfSearchXRefTablesOffsets.remove(newOffsetTable);
        }
    }
    else if (newOffsetTable > -1)
    {
        newOffset = newOffsetTable;
        bfSearchXRefTablesOffsets.remove(newOffsetTable);
    }
    else if (newOffsetStream > -1)
    {
        newOffset = newOffsetStream;
        bfSearchXRefStreamsOffsets.remove(newOffsetStream);
    }
    return newOffset;
}

/**
 * Returns the value from the list that is nearest to the given offset,
 * or -1 if the list is empty.
 *
 * @param values candidate offsets
 * @param offset reference offset
 * @return nearest candidate or -1
 */
private long searchNearestValue(List<Long> values, long offset)
{
    long newValue = -1;
    Long currentDifference = null;
    int currentOffsetIndex = -1;
    int numberOfOffsets = values.size();
    // find the nearest value
    for (int i = 0; i < numberOfOffsets; i++)
    {
        long newDifference = offset - values.get(i);
        // find the nearest offset
        if (currentDifference == null
                || (Math.abs(currentDifference) > Math.abs(newDifference)))
        {
            currentDifference = newDifference;
            currentOffsetIndex = i;
        }
    }
    if (currentOffsetIndex > -1)
    {
        newValue = values.get(currentOffsetIndex);
    }
    return newValue;
}

/**
 * Brute force search for the last EOF marker.
 *
 * @throws IOException if something went wrong
 */
private void bfSearchForLastEOFMarker() throws IOException
{
    // computed at most once; result cached in lastEOFMarker
    if (lastEOFMarker == null)
    {
        long originOffset = source.getPosition();
        source.seek(MINIMUM_SEARCH_OFFSET);
        while (!source.isEOF())
        {
            // search for EOF marker
            if (isString(EOF_MARKER))
            {
                long tempMarker = source.getPosition();
                source.seek(tempMarker + 5);
                try
                {
                    // check if the following data is some valid pdf content
                    // which most likely indicates that the pdf is linearized,
                    // updated or just cut off somewhere in the middle
                    skipSpaces();
                    if (!isString(XREF_TABLE))
                    {
                        readObjectNumber();
                        readGenerationNumber();
                    }
                }
                catch (IOException exception)
                {
                    // save the EOF marker as the following data is most likely some garbage
                    lastEOFMarker = tempMarker;
                }
            }
            source.read();
        }
        source.seek(originOffset);
        // no EOF marker found
        if (lastEOFMarker == null)
        {
            lastEOFMarker = Long.MAX_VALUE;
        }
    }
}

/**
 * Brute force search for all object streams.
 *
 * @throws IOException if something went wrong
 */
private void bfSearchForObjStreams() throws IOException
{
    HashMap<Long, COSObjectKey> bfSearchObjStreamsOffsets = new HashMap<>();
    long originOffset = source.getPosition();
    source.seek(MINIMUM_SEARCH_OFFSET);
    char[] string = " obj".toCharArray();
    while (!source.isEOF())
    {
        // search for EOF marker
        if (isString(OBJ_STREAM))
        {
            long currentPosition = source.getPosition();
            // search backwards for the beginning of the object
            long newOffset = -1;
            COSObjectKey streamObjectKey = null;
            boolean objFound = false;
            for (int i = 1; i < 40 && !objFound; i++)
            {
                long currentOffset = currentPosition - (i * 10);
                if (currentOffset > 0)
                {
                    source.seek(currentOffset);
                    for (int j = 0; j < 10; j++)
                    {
                        if (isString(string))
                        {
                            long tempOffset = currentOffset - 1;
                            source.seek(tempOffset);
                            int genID = source.peek();
                            // is the next char a digit?
if (isDigit(genID)) { tempOffset--; source.seek(tempOffset); if (isSpace()) { int length = 0; source.seek(--tempOffset); while (tempOffset > MINIMUM_SEARCH_OFFSET && isDigit()) { source.seek(--tempOffset); length++; } if (length > 0) { source.read(); newOffset = source.getPosition(); long objNumber = readObjectNumber(); int genNumber = readGenerationNumber(); streamObjectKey = new COSObjectKey(objNumber, genNumber); bfSearchObjStreamsOffsets.put(newOffset, streamObjectKey); } } } LOG.debug("Dictionary start for object stream -> " + newOffset); objFound = true; break; } else { currentOffset++; source.read(); } } } } source.seek(currentPosition + OBJ_STREAM.length); } source.read(); } // add all found compressed objects to the brute force search result for (Long offset : bfSearchObjStreamsOffsets.keySet()) { long bfOffset = bfSearchCOSObjectKeyOffsets.get(bfSearchObjStreamsOffsets.get(offset)); // check if the object was overwritten if (offset == bfOffset) { source.seek(offset); long stmObjNumber = readObjectNumber(); readGenerationNumber(); readExpectedString(OBJ_MARKER, true); COSDictionary dict = parseCOSDictionary(); int offsetFirstStream = dict.getInt(COSName.FIRST); int nrOfObjects = dict.getInt(COSName.N); COSStream stream = parseCOSStream(dict); COSInputStream is = stream.createInputStream(); byte[] numbersBytes = new byte[offsetFirstStream]; is.read(numbersBytes); is.close(); stream.close(); int start = 0; // skip spaces while (numbersBytes[start] == 32) { start++; } String numbersStr = new String(numbersBytes, start, numbersBytes.length - start, "ISO-8859-1"); String[] numbers = numbersStr.split(" "); for (int i = 0; i < nrOfObjects; i++) { long objNumber = Long.parseLong(numbers[i * 2]); COSObjectKey objKey = new COSObjectKey(objNumber, 0); Long existingOffset = bfSearchCOSObjectKeyOffsets.get(objKey); if (existingOffset == null || offset > existingOffset) { bfSearchCOSObjectKeyOffsets.put(objKey, -stmObjNumber); } } } } source.seek(originOffset); } /** * 
Brute force search for all xref entries (tables). * * @throws IOException if something went wrong */ private void bfSearchForXRefTables() throws IOException { if (bfSearchXRefTablesOffsets == null) { // a pdf may contain more than one xref entry bfSearchXRefTablesOffsets = new Vector<>(); long originOffset = source.getPosition(); source.seek(MINIMUM_SEARCH_OFFSET); // search for xref tables while (!source.isEOF()) { if (isString(XREF_TABLE)) { long newOffset = source.getPosition(); source.seek(newOffset - 1); // ensure that we don't read "startxref" instead of "xref" if (isWhitespace()) { bfSearchXRefTablesOffsets.add(newOffset); } source.seek(newOffset + 4); } source.read(); } source.seek(originOffset); } } /** * Brute force search for all /XRef entries (streams). * * @throws IOException if something went wrong */ private void bfSearchForXRefStreams() throws IOException { if (bfSearchXRefStreamsOffsets == null) { // a pdf may contain more than one /XRef entry bfSearchXRefStreamsOffsets = new Vector<>(); long originOffset = source.getPosition(); source.seek(MINIMUM_SEARCH_OFFSET); // search for XRef streams String objString = " obj"; char[] string = objString.toCharArray(); while (!source.isEOF()) { if (isString(XREF_STREAM)) { // search backwards for the beginning of the stream long newOffset = -1; long xrefOffset = source.getPosition(); boolean objFound = false; for (int i = 1; i < 40 && !objFound; i++) { long currentOffset = xrefOffset - (i * 10); if (currentOffset > 0) { source.seek(currentOffset); for (int j = 0; j < 10; j++) { if (isString(string)) { long tempOffset = currentOffset - 1; source.seek(tempOffset); int genID = source.peek(); // is the next char a digit? 
if (isDigit(genID)) { tempOffset--; source.seek(tempOffset); if (isSpace()) { int length = 0; source.seek(--tempOffset); while (tempOffset > MINIMUM_SEARCH_OFFSET && isDigit()) { source.seek(--tempOffset); length++; } if (length > 0) { source.read(); newOffset = source.getPosition(); } } } LOG.debug("Fixed reference for xref stream " + xrefOffset + " -> " + newOffset); objFound = true; break; } else { currentOffset++; source.read(); } } } } if (newOffset > -1) { bfSearchXRefStreamsOffsets.add(newOffset); } source.seek(xrefOffset + 5); } source.read(); } source.seek(originOffset); } } /** * Rebuild the trailer dictionary if startxref can't be found. * * @return the rebuild trailer dictionary * * @throws IOException if something went wrong */ private final COSDictionary rebuildTrailer() throws IOException { COSDictionary trailer = null; Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets(); // reset trailer resolver xrefTrailerResolver.reset(); // use the found objects to rebuild the trailer resolver xrefTrailerResolver.nextXrefObj(0, XRefType.TABLE); for (Entry<COSObjectKey, Long> entry : bfCOSObjectKeyOffsets.entrySet()) { xrefTrailerResolver.setXRef(entry.getKey(), entry.getValue()); } xrefTrailerResolver.setStartxref(0); trailer = xrefTrailerResolver.getTrailer(); getDocument().setTrailer(trailer); // search for the different parts of the trailer dictionary for (Entry<COSObjectKey, Long> entry : bfCOSObjectKeyOffsets.entrySet()) { Long offset = entry.getValue(); // skip compressed objects if (offset < 0) { continue; } source.seek(offset); readObjectNumber(); readGenerationNumber(); readExpectedString(OBJ_MARKER, true); try { if (source.peek() != '<') { continue; } COSDictionary dictionary = parseCOSDictionary(); // document catalog if (isCatalog(dictionary)) { trailer.setItem(COSName.ROOT, document.getObjectFromPool(entry.getKey())); } // info dictionary else if (!dictionary.containsKey(COSName.PARENT) && 
                        (dictionary.containsKey(COSName.MOD_DATE)
                        || dictionary.containsKey(COSName.TITLE)
                        || dictionary.containsKey(COSName.AUTHOR)
                        || dictionary.containsKey(COSName.SUBJECT)
                        || dictionary.containsKey(COSName.KEYWORDS)
                        || dictionary.containsKey(COSName.CREATOR)
                        || dictionary.containsKey(COSName.PRODUCER)
                        || dictionary.containsKey(COSName.CREATION_DATE)))
                {
                    trailer.setItem(COSName.INFO, document.getObjectFromPool(entry.getKey()));
                }
                // encryption dictionary, if existing, is lost
                // We can't run "Algorithm 2" from PDF specification because of missing ID
            }
            catch (IOException exception)
            {
                LOG.debug("Skipped object " + entry.getKey()
                        + ", either it's corrupt or not a dictionary");
            }
        }
        return trailer;
    }

    /**
     * Tell if the dictionary is a PDF catalog. Override this for an FDF catalog.
     *
     * @param dictionary the dictionary to be checked
     * @return true if the given dictionary has a /Type entry of /Catalog
     */
    protected boolean isCatalog(COSDictionary dictionary)
    {
        return COSName.CATALOG.equals(dictionary.getCOSName(COSName.TYPE));
    }

    /**
     * This will parse the startxref section from the stream.
     * The startxref value is ignored.
     *
     * @return the startxref value or -1 on parsing error
     * @throws IOException If an IO error occurs.
     */
    private long parseStartXref() throws IOException
    {
        long startXref = -1;
        if (isString(STARTXREF))
        {
            readString();
            skipSpaces();
            // This integer is the byte offset of the first object referenced by the xref or xref stream
            startXref = readLong();
        }
        return startXref;
    }

    /**
     * Checks if the given string can be found at the current offset.
     * The source position is restored (rewound) before returning.
     *
     * @param string the bytes of the string to look for
     * @return true if the bytes are in place, false if not
     * @throws IOException if something went wrong
     */
    private boolean isString(byte[] string) throws IOException
    {
        boolean bytesMatching = false;
        if (source.peek() == string[0])
        {
            int length = string.length;
            byte[] bytesRead = new byte[length];
            int numberOfBytes = source.read(bytesRead, 0, length);
            // read() may return fewer bytes than requested; keep reading until
            // the buffer is full or the source is exhausted
            while (numberOfBytes < length)
            {
                int readMore = source.read(bytesRead, numberOfBytes, length - numberOfBytes);
                if (readMore < 0)
                {
                    break;
                }
                numberOfBytes += readMore;
            }
            bytesMatching = Arrays.equals(string, bytesRead);
            // undo the read so the caller's position is unchanged
            source.rewind(numberOfBytes);
        }
        return bytesMatching;
    }

    /**
     * Checks if the given string can be found at the current offset.
     * The source position is restored before returning.
     *
     * @param string the bytes of the string to look for
     * @return true if the bytes are in place, false if not
     * @throws IOException if something went wrong
     */
    private boolean isString(char[] string) throws IOException
    {
        boolean bytesMatching = true;
        long originOffset = source.getPosition();
        for (char c : string)
        {
            if (source.read() != c)
            {
                bytesMatching = false;
                break;
            }
        }
        source.seek(originOffset);
        return bytesMatching;
    }

    /**
     * This will parse the trailer from the stream and add it to the state.
     *
     * @return false on parsing error
     * @throws IOException If an IO error occurs.
     */
    private boolean parseTrailer() throws IOException
    {
        // parse the last trailer.
        trailerOffset = source.getPosition();
        // PDFBOX-1739 skip extra xref entries in RegisSTAR documents
        if (isLenient)
        {
            int nextCharacter = source.peek();
            while (nextCharacter != 't' && isDigit(nextCharacter))
            {
                if (source.getPosition() == trailerOffset)
                {
                    // warn only the first time
                    LOG.warn("Expected trailer object at position " + trailerOffset
                            + ", keep trying");
                }
                readLine();
                nextCharacter = source.peek();
            }
        }
        if(source.peek() != 't')
        {
            return false;
        }
        //read "trailer"
        long currentOffset = source.getPosition();
        String nextLine = readLine();
        if( !nextLine.trim().equals( "trailer" ) )
        {
            // in some cases the EOL is missing and the trailer immediately
            // continues with "<<" or with a blank character
            // even if this does not comply with PDF reference we want to support as many PDFs as possible
            // Acrobat reader can also deal with this.
            if (nextLine.startsWith("trailer"))
            {
                // we can't just unread a portion of the read data as we don't know if the EOL consist of 1 or 2 bytes
                int len = "trailer".length();
                // jump back right after "trailer"
                source.seek(currentOffset + len);
            }
            else
            {
                return false;
            }
        }
        // in some cases the EOL is missing and the trailer continues with " <<"
        // even if this does not comply with PDF reference we want to support as many PDFs as possible
        // Acrobat reader can also deal with this.
        skipSpaces();
        COSDictionary parsedTrailer = parseCOSDictionary();
        xrefTrailerResolver.setTrailer( parsedTrailer );
        skipSpaces();
        return true;
    }

    /**
     * Parse the header of a pdf.
     *
     * @return true if a PDF header was found
     * @throws IOException if something went wrong
     */
    protected boolean parsePDFHeader() throws IOException
    {
        return parseHeader(PDF_HEADER, PDF_DEFAULT_VERSION);
    }

    /**
     * Parse the header of a fdf.
* * @return true if a FDF header was found * @throws IOException if something went wrong */ protected boolean parseFDFHeader() throws IOException { return parseHeader(FDF_HEADER, FDF_DEFAULT_VERSION); } private boolean parseHeader(String headerMarker, String defaultVersion) throws IOException { // read first line String header = readLine(); // some pdf-documents are broken and the pdf-version is in one of the following lines if (!header.contains(headerMarker)) { header = readLine(); while (!header.contains(headerMarker)) { // if a line starts with a digit, it has to be the first one with data in it if ((header.length() > 0) && (Character.isDigit(header.charAt(0)))) { break; } header = readLine(); } } // nothing found if (!header.contains(headerMarker)) { source.seek(0); return false; } //sometimes there is some garbage in the header before the header //actually starts, so lets try to find the header first. int headerStart = header.indexOf( headerMarker ); // greater than zero because if it is zero then there is no point of trimming if ( headerStart > 0 ) { //trim off any leading characters header = header.substring( headerStart, header.length() ); } // This is used if there is garbage after the header on the same line if (header.startsWith(headerMarker) && !header.matches(headerMarker + "\\d.\\d")) { if (header.length() < headerMarker.length() + 3) { // No version number at all, set to 1.4 as default header = headerMarker + defaultVersion; LOG.debug("No version found, set to " + defaultVersion + " as default."); } else { String headerGarbage = header.substring(headerMarker.length() + 3, header.length()) + "\n"; header = header.substring(0, headerMarker.length() + 3); source.rewind(headerGarbage.getBytes(ISO_8859_1).length); } } float headerVersion = -1; try { String[] headerParts = header.split("-"); if (headerParts.length == 2) { headerVersion = Float.parseFloat(headerParts[1]); } } catch (NumberFormatException exception) { LOG.debug("Can't parse the header 
version.", exception); } if (headerVersion < 0) { if (isLenient) { headerVersion = 1.7f; } else { throw new IOException("Error getting header version: " + header); } } document.setVersion(headerVersion); // rewind source.seek(0); return true; } /** * This will parse the xref table from the stream and add it to the state * The XrefTable contents are ignored. * @param startByteOffset the offset to start at * @return false on parsing error * @throws IOException If an IO error occurs. */ protected boolean parseXrefTable(long startByteOffset) throws IOException { long xrefTableStartOffset = source.getPosition(); if(source.peek() != 'x') { return false; } String xref = readString(); if( !xref.trim().equals( "xref" ) ) { return false; } // check for trailer after xref String str = readString(); byte[] b = str.getBytes(ISO_8859_1); source.rewind(b.length); // signal start of new XRef xrefTrailerResolver.nextXrefObj( startByteOffset, XRefType.TABLE ); if (str.startsWith("trailer")) { LOG.warn("skipping empty xref table"); return false; } // Xref tables can have multiple sections. Each starts with a starting object id and a count. 
while(true) { String currentLine = readLine(); String[] splitString = currentLine.split("\\s"); if (splitString.length != 2) { LOG.warn("Unexpected XRefTable Entry: " + currentLine); break; } // first obj id long currObjID = Long.parseLong(splitString[0]); // the number of objects in the xref table int count = Integer.parseInt(splitString[1]); skipSpaces(); for(int i = 0; i < count; i++) { if(source.isEOF() || isEndOfName((char)source.peek())) { break; } if(source.peek() == 't') { break; } //Ignore table contents currentLine = readLine(); splitString = currentLine.split("\\s"); if (splitString.length < 3) { LOG.warn("invalid xref line: " + currentLine); break; } /* This supports the corrupt table as reported in * PDFBOX-474 (XXXX XXX XX n) */ if(splitString[splitString.length-1].equals("n")) { try { long currOffset = Long.parseLong(splitString[0]); if (currOffset >= xrefTableStartOffset && currOffset <= source.getPosition()) { // PDFBOX-3923: offset points inside this table - that can't be good // PDFBOX-3935: don't abort (rebuilding trailer would lose encryption // dictionary), just skip // alternative fix: in checkXrefOffsets() do clear() before putAll() LOG.warn("XRefTable offset " + currOffset + " is within xref table (start offset: " + xrefTableStartOffset + ") for object " + currObjID); } else { int currGenID = Integer.parseInt(splitString[1]); COSObjectKey objKey = new COSObjectKey(currObjID, currGenID); xrefTrailerResolver.setXRef(objKey, currOffset); } } catch(NumberFormatException e) { throw new IOException(e); } } else if(!splitString[2].equals("f")) { throw new IOException("Corrupt XRefTable Entry - ObjID:" + currObjID); } currObjID++; skipSpaces(); } skipSpaces(); if (!isDigit()) { break; } } return true; } /** * Fills XRefTrailerResolver with data of given stream. * Stream must be of type XRef. 
* @param stream the stream to be read * @param objByteOffset the offset to start at * @param isStandalone should be set to true if the stream is not part of a hybrid xref table * @throws IOException if there is an error parsing the stream */ private void parseXrefStream(COSStream stream, long objByteOffset, boolean isStandalone) throws IOException { // the cross reference stream of a hybrid xref table will be added to the existing one // and we must not override the offset and the trailer if ( isStandalone ) { xrefTrailerResolver.nextXrefObj( objByteOffset, XRefType.STREAM ); xrefTrailerResolver.setTrailer( stream ); } PDFXrefStreamParser parser = new PDFXrefStreamParser( stream, document, xrefTrailerResolver ); parser.parse(); } /** * This will get the document that was parsed. parse() must be called before this is called. * When you are done with this document you must call close() on it to release * resources. * * @return The document that was parsed. * * @throws IOException If there is an error getting the document. */ public COSDocument getDocument() throws IOException { if( document == null ) { throw new IOException( "You must call parse() before calling getDocument()" ); } return document; } /** * Parse the values of the trailer dictionary and return the root object. * * @param trailer The trailer dictionary. * @return The parsed root object. * @throws IOException If an IO error occurs or if the root object is * missing in the trailer dictionary. 
*/ protected COSBase parseTrailerValuesDynamically(COSDictionary trailer) throws IOException { // PDFBOX-1557 - ensure that all COSObject are loaded in the trailer // PDFBOX-1606 - after securityHandler has been instantiated for (COSBase trailerEntry : trailer.getValues()) { if (trailerEntry instanceof COSObject) { COSObject tmpObj = (COSObject) trailerEntry; parseObjectDynamically(tmpObj, false); } } // parse catalog or root object COSObject root = (COSObject) trailer.getItem(COSName.ROOT); if (root == null) { throw new IOException("Missing root object specification in trailer."); } return parseObjectDynamically(root, false); } }
pdfbox/src/main/java/org/apache/pdfbox/pdfparser/COSParser.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdfparser; import static org.apache.pdfbox.util.Charsets.ISO_8859_1; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import java.util.Set; import java.util.TreeMap; import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pdfbox.cos.COSArray; import org.apache.pdfbox.cos.COSBase; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSDocument; import org.apache.pdfbox.cos.COSInputStream; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.cos.COSNull; import org.apache.pdfbox.cos.COSNumber; import org.apache.pdfbox.cos.COSObject; import org.apache.pdfbox.cos.COSObjectKey; import org.apache.pdfbox.cos.COSStream; import org.apache.pdfbox.io.RandomAccessRead; import org.apache.pdfbox.pdfparser.XrefTrailerResolver.XRefType; import org.apache.pdfbox.pdmodel.encryption.SecurityHandler; 
/**
 * PDF-Parser which first reads startxref and xref tables in order to know valid objects and parse only these objects.
 *
 * First {@link PDFParser#parse()} or {@link FDFParser#parse()} must be called before page objects
 * can be retrieved, e.g. {@link PDFParser#getPDDocument()}.
 *
 * This class is a much enhanced version of <code>QuickParser</code> presented in <a
 * href="https://issues.apache.org/jira/browse/PDFBOX-1104">PDFBOX-1104</a> by Jeremy Villalobos.
 */
public class COSParser extends BaseParser
{
    private static final String PDF_HEADER = "%PDF-";
    private static final String FDF_HEADER = "%FDF-";

    private static final String PDF_DEFAULT_VERSION = "1.4";
    private static final String FDF_DEFAULT_VERSION = "1.0";

    // markers searched for while scanning the raw byte stream
    private static final char[] XREF_TABLE = new char[] { 'x', 'r', 'e', 'f' };
    private static final char[] XREF_STREAM = new char[] { '/', 'X', 'R', 'e', 'f' };
    private static final char[] STARTXREF = new char[] { 's','t','a','r','t','x','r','e','f' };

    private static final byte[] ENDSTREAM = new byte[] { E, N, D, S, T, R, E, A, M };

    private static final byte[] ENDOBJ = new byte[] { E, N, D, O, B, J };

    // no object can start before this offset (the file header comes first)
    private static final long MINIMUM_SEARCH_OFFSET = 6;

    private static final int X = 'x';

    private static final int STRMBUFLEN = 2048;
    // scratch buffer for stream copying
    private final byte[] strmBuf = new byte[ STRMBUFLEN ];

    protected final RandomAccessRead source;

    /**
     * Only parse the PDF file minimally allowing access to basic information.
     */
    public static final String SYSPROP_PARSEMINIMAL =
            "org.apache.pdfbox.pdfparser.nonSequentialPDFParser.parseMinimal";

    /**
     * The range within the %%EOF marker will be searched.
     * Useful if there are additional characters after %%EOF within the PDF.
     */
    public static final String SYSPROP_EOFLOOKUPRANGE =
            "org.apache.pdfbox.pdfparser.nonSequentialPDFParser.eofLookupRange";

    /**
     * How many trailing bytes to read for EOF marker.
     */
    private static final int DEFAULT_TRAIL_BYTECOUNT = 2048;
    /**
     * EOF-marker.
     */
    protected static final char[] EOF_MARKER = new char[] { '%', '%', 'E', 'O', 'F' };
    /**
     * obj-marker.
     */
    protected static final char[] OBJ_MARKER = new char[] { 'o', 'b', 'j' };
    /**
     * ObjStream-marker.
     */
    private static final char[] OBJ_STREAM = new char[] { '/', 'O', 'b', 'j', 'S', 't', 'm' };

    // position of the (last) trailer keyword, set by parseTrailer()
    private long trailerOffset;

    /**
     * file length.
     */
    protected long fileLen;

    /**
     * is parser using auto healing capacity ?
     */
    private boolean isLenient = true;

    protected boolean initialParseDone = false;

    /**
     * Contains all found objects of a brute force search.
     */
    private Map<COSObjectKey, Long> bfSearchCOSObjectKeyOffsets = null;
    // offset of the last %%EOF marker; Long.MAX_VALUE if none was found
    private Long lastEOFMarker = null;
    // xref table / stream offsets found by brute force search (lazily filled)
    private List<Long> bfSearchXRefTablesOffsets = null;
    private List<Long> bfSearchXRefStreamsOffsets = null;

    /**
     * The security handler.
     */
    protected SecurityHandler securityHandler = null;

    /**
     * how many trailing bytes to read for EOF marker.
     */
    private int readTrailBytes = DEFAULT_TRAIL_BYTECOUNT;

    private static final Log LOG = LogFactory.getLog(COSParser.class);

    /**
     * Collects all Xref/trailer objects and resolves them into single
     * object using startxref reference.
     */
    protected XrefTrailerResolver xrefTrailerResolver = new XrefTrailerResolver();

    /**
     * The prefix for the temp file being used.
     */
    public static final String TMP_FILE_PREFIX = "tmpPDF";

    /**
     * Default constructor.
     *
     * @param source input representing the pdf to be parsed
     */
    public COSParser(RandomAccessRead source)
    {
        super(new RandomAccessSource(source));
        this.source = source;
    }

    /**
     * Sets how many trailing bytes of PDF file are searched for EOF marker and 'startxref' marker. If not set we use
     * default value {@link #DEFAULT_TRAIL_BYTECOUNT}.
     *
     * <p>We check that new value is at least 16.
     * However for practical use cases this value should not be lower than
     * 1000; even 2000 was found to not be enough in some cases where some trailing garbage like HTML snippets followed
     * the EOF marker.</p>
     *
     * <p>
     * In case system property {@link #SYSPROP_EOFLOOKUPRANGE} is defined this value will be set on initialization but
     * can be overwritten later.
     * </p>
     *
     * @param byteCount number of trailing bytes
     */
    public void setEOFLookupRange(int byteCount)
    {
        // silently ignore values below the minimum of 16
        if (byteCount > 15)
        {
            readTrailBytes = byteCount;
        }
    }

    /**
     * Read the trailer information and provide a COSDictionary containing the trailer information.
     *
     * @return a COSDictionary containing the trailer information
     * @throws IOException if something went wrong
     */
    protected COSDictionary retrieveTrailer() throws IOException
    {
        COSDictionary trailer = null;
        boolean rebuildTrailer = false;
        try
        {
            // parse startxref
            // TODO FDF files don't have a startxref value, so that rebuildTrailer is triggered
            long startXRefOffset = getStartxrefOffset();
            if (startXRefOffset > -1)
            {
                trailer = parseXref(startXRefOffset);
            }
            else
            {
                rebuildTrailer = isLenient();
            }
        }
        catch (IOException exception)
        {
            // in lenient mode fall back to a brute force rebuild instead of failing
            if (isLenient())
            {
                rebuildTrailer = true;
            }
            else
            {
                throw exception;
            }
        }
        // check if the trailer contains a Root object
        if (trailer != null && trailer.getItem(COSName.ROOT) == null)
        {
            rebuildTrailer = isLenient();
        }
        if (rebuildTrailer)
        {
            trailer = rebuildTrailer();
        }
        return trailer;
    }

    /**
     * Parses cross reference tables.
     *
     * @param startXRefOffset start offset of the first table
     * @return the trailer dictionary
     * @throws IOException if something went wrong
     */
    private COSDictionary parseXref(long startXRefOffset) throws IOException
    {
        source.seek(startXRefOffset);
        long startXrefOffset = Math.max(0, parseStartXref());
        // check the startxref offset
        long fixedOffset = checkXRefOffset(startXrefOffset);
        if (fixedOffset > -1)
        {
            startXrefOffset = fixedOffset;
        }
        document.setStartXref(startXrefOffset);
        long prev = startXrefOffset;
        // ---- parse whole chain of xref tables/object streams using PREV reference
        // prevSet guards against documents whose PREV entries form a cycle
        Set<Long> prevSet = new HashSet<>();
        while (prev > 0)
        {
            // seek to xref table
            source.seek(prev);
            // skip white spaces
            skipSpaces();
            // -- parse xref
            if (source.peek() == X)
            {
                // xref table and trailer
                // use existing parser to parse xref table
                parseXrefTable(prev);
                if (!parseTrailer())
                {
                    throw new IOException("Expected trailer object at position: "
                            + source.getPosition());
                }
                COSDictionary trailer = xrefTrailerResolver.getCurrentTrailer();
                // check for a XRef stream, it may contain some object ids of compressed objects
                if(trailer.containsKey(COSName.XREF_STM))
                {
                    int streamOffset = trailer.getInt(COSName.XREF_STM);
                    // check the xref stream reference
                    fixedOffset = checkXRefOffset(streamOffset);
                    if (fixedOffset > -1 && fixedOffset != streamOffset)
                    {
                        LOG.warn("/XRefStm offset " + streamOffset + " is incorrect, corrected to " + fixedOffset);
                        streamOffset = (int)fixedOffset;
                        trailer.setInt(COSName.XREF_STM, streamOffset);
                    }
                    if (streamOffset > 0)
                    {
                        source.seek(streamOffset);
                        skipSpaces();
                        try
                        {
                            // hybrid file: merge the xref stream into the current section
                            parseXrefObjStream(prev, false);
                        }
                        catch (IOException ex)
                        {
                            if (isLenient)
                            {
                                LOG.error("Failed to parse /XRefStm at offset " + streamOffset, ex);
                            }
                            else
                            {
                                throw ex;
                            }
                        }
                    }
                    else
                    {
                        if(isLenient)
                        {
                            LOG.error("Skipped XRef stream due to a corrupt offset:"+streamOffset);
                        }
                        else
                        {
                            throw new IOException("Skipped XRef stream due to a corrupt offset:"+streamOffset);
                        }
                    }
                }
                prev = trailer.getLong(COSName.PREV);
                if (prev > 0)
                {
                    // check the xref table reference
                    fixedOffset = checkXRefOffset(prev);
                    if (fixedOffset > -1 && fixedOffset != prev)
                    {
                        prev = fixedOffset;
                        trailer.setLong(COSName.PREV, prev);
                    }
                }
            }
            else
            {
                // parse xref stream
                prev = parseXrefObjStream(prev, true);
                if (prev > 0)
                {
                    // check the xref table reference
                    fixedOffset = checkXRefOffset(prev);
                    if (fixedOffset > -1 && fixedOffset != prev)
                    {
                        prev = fixedOffset;
                        COSDictionary trailer = xrefTrailerResolver.getCurrentTrailer();
                        trailer.setLong(COSName.PREV, prev);
                    }
                }
            }
            if (prevSet.contains(prev))
            {
                throw new IOException("/Prev loop at offset " + prev);
            }
            prevSet.add(prev);
        }
        // ---- build valid xrefs out of the xref chain
        xrefTrailerResolver.setStartxref(startXrefOffset);
        COSDictionary trailer = xrefTrailerResolver.getTrailer();
        document.setTrailer(trailer);
        document.setIsXRefStream(XRefType.STREAM == xrefTrailerResolver.getXrefType());
        // check the offsets of all referenced objects
        checkXrefOffsets();
        // copy xref table
        document.addXRefTable(xrefTrailerResolver.getXrefTable());
        return trailer;
    }

    /**
     * Parses an xref object stream starting with indirect object id.
     *
     * @param objByteOffset offset of the object this stream belongs to
     * @param isStandalone should be set to true if the stream is not part of a hybrid xref table
     * @return value of PREV item in dictionary or <code>-1</code> if no such item exists
     */
    private long parseXrefObjStream(long objByteOffset, boolean isStandalone) throws IOException
    {
        // ---- parse indirect object head
        readObjectNumber();
        readGenerationNumber();
        readExpectedString(OBJ_MARKER, true);
        COSDictionary dict = parseCOSDictionary();
        try (COSStream xrefStream = parseCOSStream(dict))
        {
            parseXrefStream(xrefStream, objByteOffset, isStandalone);
        }
        return dict.getLong(COSName.PREV);
    }

    /**
     * Looks for and parses startxref. We first look for last '%%EOF' marker (within last
     * {@link #DEFAULT_TRAIL_BYTECOUNT} bytes (or range set via {@link #setEOFLookupRange(int)}) and go back to find
     * <code>startxref</code>.
     *
     * @return the offset of StartXref
     * @throws IOException If something went wrong.
*/ private final long getStartxrefOffset() throws IOException { byte[] buf; long skipBytes; // read trailing bytes into buffer try { final int trailByteCount = (fileLen < readTrailBytes) ? (int) fileLen : readTrailBytes; buf = new byte[trailByteCount]; skipBytes = fileLen - trailByteCount; source.seek(skipBytes); int off = 0; int readBytes; while (off < trailByteCount) { readBytes = source.read(buf, off, trailByteCount - off); // in order to not get stuck in a loop we check readBytes (this should never happen) if (readBytes < 1) { throw new IOException( "No more bytes to read for trailing buffer, but expected: " + (trailByteCount - off)); } off += readBytes; } } finally { source.seek(0); } // find last '%%EOF' int bufOff = lastIndexOf(EOF_MARKER, buf, buf.length); if (bufOff < 0) { if (isLenient) { // in lenient mode the '%%EOF' isn't needed bufOff = buf.length; LOG.debug("Missing end of file marker '" + new String(EOF_MARKER) + "'"); } else { throw new IOException("Missing end of file marker '" + new String(EOF_MARKER) + "'"); } } // find last startxref preceding EOF marker bufOff = lastIndexOf(STARTXREF, buf, bufOff); if (bufOff < 0) { throw new IOException("Missing 'startxref' marker."); } else { return skipBytes + bufOff; } } /** * Searches last appearance of pattern within buffer. Lookup before _lastOff and goes back until 0. 
* * @param pattern pattern to search for * @param buf buffer to search pattern in * @param endOff offset (exclusive) where lookup starts at * * @return start offset of pattern within buffer or <code>-1</code> if pattern could not be found */ protected int lastIndexOf(final char[] pattern, final byte[] buf, final int endOff) { final int lastPatternChOff = pattern.length - 1; int bufOff = endOff; int patOff = lastPatternChOff; char lookupCh = pattern[patOff]; while (--bufOff >= 0) { if (buf[bufOff] == lookupCh) { if (--patOff < 0) { // whole pattern matched return bufOff; } // matched current char, advance to preceding one lookupCh = pattern[patOff]; } else if (patOff < lastPatternChOff) { // no char match but already matched some chars; reset patOff = lastPatternChOff; lookupCh = pattern[patOff]; } } return -1; } /** * Return true if parser is lenient. Meaning auto healing capacity of the parser are used. * * @return true if parser is lenient */ public boolean isLenient() { return isLenient; } /** * Change the parser leniency flag. * * This method can only be called before the parsing of the file. * * @param lenient try to handle malformed PDFs. * */ public void setLenient(boolean lenient) { if (initialParseDone) { throw new IllegalArgumentException("Cannot change leniency after parsing"); } this.isLenient = lenient; } /** * Creates a unique object id using object number and object generation * number. (requires object number &lt; 2^31)) */ private long getObjectId(final COSObject obj) { return obj.getObjectNumber() << 32 | obj.getGenerationNumber(); } /** * Adds all from newObjects to toBeParsedList if it is not an COSObject or * we didn't add this COSObject already (checked via addedObjects). 
 */
private void addNewToList(final Queue<COSBase> toBeParsedList,
        final Collection<COSBase> newObjects, final Set<Long> addedObjects)
{
    for (COSBase newObject : newObjects)
    {
        addNewToList(toBeParsedList, newObject, addedObjects);
    }
}

/**
 * Adds newObject to toBeParsedList if it is not an COSObject or we didn't
 * add this COSObject already (checked via addedObjects).
 */
private void addNewToList(final Queue<COSBase> toBeParsedList, final COSBase newObject,
        final Set<Long> addedObjects)
{
    if (newObject instanceof COSObject)
    {
        final long objId = getObjectId((COSObject) newObject);
        // add() returns false if the id was already present -> skip duplicates
        if (!addedObjects.add(objId))
        {
            return;
        }
    }
    toBeParsedList.add(newObject);
}

/**
 * Will parse every object necessary to load a single page from the pdf document. We try our
 * best to order objects according to offset in file before reading to minimize seek operations.
 *
 * @param dict the COSObject from the parent pages.
 * @param excludeObjects dictionary object reference entries with these names will not be parsed
 *
 * @throws IOException if something went wrong
 */
protected void parseDictObjects(COSDictionary dict, COSName... excludeObjects) throws IOException
{
    // ---- create queue for objects waiting for further parsing
    final Queue<COSBase> toBeParsedList = new LinkedList<>();
    // offset ordered object map
    final TreeMap<Long, List<COSObject>> objToBeParsed = new TreeMap<>();
    // in case of compressed objects offset points to stmObj
    final Set<Long> parsedObjects = new HashSet<>();
    final Set<Long> addedObjects = new HashSet<>();
    addExcludedToList(excludeObjects, dict, parsedObjects);
    addNewToList(toBeParsedList, dict.getValues(), addedObjects);
    // ---- go through objects to be parsed
    while (!(toBeParsedList.isEmpty() && objToBeParsed.isEmpty()))
    {
        // -- first get all COSObject from other kind of objects and
        // put them in objToBeParsed; afterwards toBeParsedList is empty
        COSBase baseObj;
        while ((baseObj = toBeParsedList.poll()) != null)
        {
            if (baseObj instanceof COSDictionary)
            {
                addNewToList(toBeParsedList, ((COSDictionary) baseObj).getValues(), addedObjects);
            }
            else if (baseObj instanceof COSArray)
            {
                for (COSBase cosBase : ((COSArray) baseObj))
                {
                    addNewToList(toBeParsedList, cosBase, addedObjects);
                }
            }
            else if (baseObj instanceof COSObject)
            {
                COSObject obj = (COSObject) baseObj;
                long objId = getObjectId(obj);
                COSObjectKey objKey = new COSObjectKey(obj.getObjectNumber(),
                        obj.getGenerationNumber());
                if (!parsedObjects.contains(objId))
                {
                    Long fileOffset = document.getXrefTable().get(objKey);
                    // lenient fallback: repair a missing xref entry via brute force search
                    if (fileOffset == null && isLenient)
                    {
                        Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets();
                        fileOffset = bfCOSObjectKeyOffsets.get(objKey);
                        if (fileOffset != null)
                        {
                            LOG.debug("Set missing " + fileOffset + " for object " + objKey);
                            document.getXrefTable().put(objKey, fileOffset);
                        }
                    }
                    // it is allowed that object references point to null, thus we have to test
                    if (fileOffset != null && fileOffset != 0)
                    {
                        if (fileOffset > 0)
                        {
                            objToBeParsed.put(fileOffset, Collections.singletonList(obj));
                        }
                        else
                        {
                            // negative offset means we have a compressed
                            // object within object stream;
                            // get offset of object stream
                            COSObjectKey key = new COSObjectKey((int) -fileOffset, 0);
                            fileOffset = document.getXrefTable().get(key);
                            if ((fileOffset == null) || (fileOffset <= 0))
                            {
                                if (isLenient)
                                {
                                    Map<COSObjectKey, Long> bfCOSObjectKeyOffsets =
                                            getBFCOSObjectOffsets();
                                    fileOffset = bfCOSObjectKeyOffsets.get(key);
                                    if (fileOffset != null)
                                    {
                                        LOG.debug("Set missing " + fileOffset + " for object "
                                                + key);
                                        document.getXrefTable().put(key, fileOffset);
                                    }
                                }
                                else
                                {
                                    throw new IOException(
                                            "Invalid object stream xref object reference for key '"
                                                    + objKey + "': " + fileOffset);
                                }
                            }
                            List<COSObject> stmObjects = objToBeParsed.get(fileOffset);
                            if (stmObjects == null)
                            {
                                stmObjects = new ArrayList<>();
                                objToBeParsed.put(fileOffset, stmObjects);
                            }
                            // java does not have a test for immutable
                            // (plain objects use an immutable singletonList above)
                            else if (!(stmObjects instanceof ArrayList))
                            {
                                throw new IOException(obj + " cannot be assigned to offset "
                                        + fileOffset + ", this belongs to "
                                        + stmObjects.get(0));
                            }
                            stmObjects.add(obj);
                        }
                    }
                    else
                    {
                        // NULL object
                        COSObject pdfObject = document.getObjectFromPool(objKey);
                        pdfObject.setObject(COSNull.NULL);
                    }
                }
            }
        }
        // ---- read first COSObject with smallest offset
        // resulting object will be added to toBeParsedList
        if (objToBeParsed.isEmpty())
        {
            break;
        }
        for (COSObject obj : objToBeParsed.remove(objToBeParsed.firstKey()))
        {
            COSBase parsedObj = parseObjectDynamically(obj, false);
            if (parsedObj != null)
            {
                obj.setObject(parsedObj);
                addNewToList(toBeParsedList, parsedObj, addedObjects);
                parsedObjects.add(getObjectId(obj));
            }
        }
    }
}

// add objects not to be parsed to list of already parsed objects
private void addExcludedToList(COSName[] excludeObjects, COSDictionary dict,
        final Set<Long> parsedObjects)
{
    if (excludeObjects != null)
    {
        for (COSName objName : excludeObjects)
        {
            COSBase baseObj = dict.getItem(objName);
            if (baseObj instanceof COSObject)
            {
                parsedObjects.add(getObjectId((COSObject) baseObj));
            }
        }
    }
}

/**
 * This will parse the next object from the stream and add it to the local state.
 *
 * @param obj object to be parsed (we only take object number and generation number for lookup start offset)
 * @param requireExistingNotCompressedObj if <code>true</code> object to be parsed must not be contained within
 * compressed stream
 * @return the parsed object (which is also added to document object)
 *
 * @throws IOException If an IO error occurs.
 */
protected final COSBase parseObjectDynamically(COSObject obj,
        boolean requireExistingNotCompressedObj) throws IOException
{
    return parseObjectDynamically(obj.getObjectNumber(), obj.getGenerationNumber(),
            requireExistingNotCompressedObj);
}

/**
 * This will parse the next object from the stream and add it to the local state.
 * It's reduced to parsing an indirect object.
 *
 * @param objNr object number of object to be parsed
 * @param objGenNr object generation number of object to be parsed
 * @param requireExistingNotCompressedObj if <code>true</code> the object to be parsed must be defined in xref
 * (comment: null objects may be missing from xref) and it must not be a compressed object within object stream
 * (this is used to circumvent being stuck in a loop in a malicious PDF)
 *
 * @return the parsed object (which is also added to document object)
 *
 * @throws IOException If an IO error occurs.
 */
protected COSBase parseObjectDynamically(long objNr, int objGenNr,
        boolean requireExistingNotCompressedObj) throws IOException
{
    // ---- create object key and get object (container) from pool
    final COSObjectKey objKey = new COSObjectKey(objNr, objGenNr);
    final COSObject pdfObject = document.getObjectFromPool(objKey);
    if (pdfObject.getObject() == null)
    {
        // not previously parsed
        // ---- read offset or object stream object number from xref table
        Long offsetOrObjstmObNr = document.getXrefTable().get(objKey);
        // maybe something is wrong with the xref table -> perform brute force search for all objects
        if (offsetOrObjstmObNr == null && isLenient)
        {
            Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets();
            offsetOrObjstmObNr = bfCOSObjectKeyOffsets.get(objKey);
            if (offsetOrObjstmObNr != null)
            {
                LOG.debug("Set missing offset " + offsetOrObjstmObNr + " for object " + objKey);
                document.getXrefTable().put(objKey, offsetOrObjstmObNr);
            }
        }
        // sanity test to circumvent loops with broken documents
        if (requireExistingNotCompressedObj
                && ((offsetOrObjstmObNr == null) || (offsetOrObjstmObNr <= 0)))
        {
            throw new IOException("Object must be defined and must not be compressed object: "
                    + objKey.getNumber() + ":" + objKey.getGeneration());
        }
        if (offsetOrObjstmObNr == null)
        {
            // not defined object -> NULL object (Spec. 1.7, chap. 3.2.9)
            pdfObject.setObject(COSNull.NULL);
        }
        else if (offsetOrObjstmObNr > 0)
        {
            // offset of indirect object in file
            parseFileObject(offsetOrObjstmObNr, objKey, pdfObject);
        }
        else
        {
            // xref value is object nr of object stream containing object to be parsed
            // since our object was not found it means object stream was not parsed so far
            parseObjectStream((int) -offsetOrObjstmObNr);
        }
    }
    return pdfObject.getObject();
}

/**
 * Parses an indirect object found at the given file offset and stores the parsed value in
 * the given pool container. Streams are decrypted via the security handler if one is set.
 *
 * @param offsetOrObjstmObNr absolute file offset of the indirect object
 * @param objKey key (number:generation) the object is expected to have
 * @param pdfObject pooled container that receives the parsed value
 * @throws IOException if the object header does not match objKey or parsing fails
 */
private void parseFileObject(Long offsetOrObjstmObNr, final COSObjectKey objKey,
        final COSObject pdfObject) throws IOException
{
    // ---- go to object start
    source.seek(offsetOrObjstmObNr);
    // ---- we must have an indirect object
    final long readObjNr = readObjectNumber();
    final int readObjGen = readGenerationNumber();
    readExpectedString(OBJ_MARKER, true);
    // ---- consistency check
    if ((readObjNr != objKey.getNumber()) || (readObjGen != objKey.getGeneration()))
    {
        throw new IOException("XREF for " + objKey.getNumber() + ":" + objKey.getGeneration()
                + " points to wrong object: " + readObjNr + ":" + readObjGen + " at offset "
                + offsetOrObjstmObNr);
    }
    skipSpaces();
    COSBase pb = parseDirObject();
    String endObjectKey = readString();
    if (endObjectKey.equals(STREAM_STRING))
    {
        // put the 'stream' keyword back so parseCOSStream can consume it
        source.rewind(endObjectKey.getBytes(ISO_8859_1).length);
        if (pb instanceof COSDictionary)
        {
            COSStream stream = parseCOSStream((COSDictionary) pb);
            if (securityHandler != null)
            {
                securityHandler.decryptStream(stream, objKey.getNumber(),
                        objKey.getGeneration());
            }
            pb = stream;
        }
        else
        {
            // this is not legal
            // the combination of a dict and the stream/endstream
            // forms a complete stream object
            throw new IOException("Stream not preceded by dictionary (offset: "
                    + offsetOrObjstmObNr + ").");
        }
        skipSpaces();
        endObjectKey = readLine();
        // we have case with a second 'endstream' before endobj
        if (!endObjectKey.startsWith(ENDOBJ_STRING) && endObjectKey.startsWith(ENDSTREAM_STRING))
        {
            endObjectKey = endObjectKey.substring(9).trim();
            if (endObjectKey.length() == 0)
            {
                // no other characters in extra endstream line
                // read next line
                endObjectKey = readLine();
            }
        }
    }
    else if (securityHandler != null)
    {
        securityHandler.decrypt(pb, objKey.getNumber(), objKey.getGeneration());
    }
    pdfObject.setObject(pb);
    if (!endObjectKey.startsWith(ENDOBJ_STRING))
    {
        if (isLenient)
        {
            LOG.warn("Object (" + readObjNr + ":" + readObjGen + ") at offset "
                    + offsetOrObjstmObNr + " does not end with 'endobj' but with '"
                    + endObjectKey + "'");
        }
        else
        {
            throw new IOException("Object (" + readObjNr + ":" + readObjGen + ") at offset "
                    + offsetOrObjstmObNr + " does not end with 'endobj' but with '"
                    + endObjectKey + "'");
        }
    }
}

/**
 * Parses the object stream with the given object number and registers all contained objects
 * that the xref table maps to this stream.
 *
 * @param objstmObjNr object number of the object stream to parse
 * @throws IOException in non-lenient mode if the stream cannot be parsed
 */
private void parseObjectStream(int objstmObjNr) throws IOException
{
    // requireExistingNotCompressedObj=true prevents recursion loops on malicious PDFs
    final COSBase objstmBaseObj = parseObjectDynamically(objstmObjNr, 0, true);
    if (objstmBaseObj instanceof COSStream)
    {
        // parse object stream
        PDFObjectStreamParser parser;
        try
        {
            parser = new PDFObjectStreamParser((COSStream) objstmBaseObj, document);
        }
        catch (IOException ex)
        {
            if (isLenient)
            {
                LOG.error("object stream " + objstmObjNr
                        + " could not be parsed due to an exception", ex);
                return;
            }
            else
            {
                throw ex;
            }
        }
        try
        {
            parser.parse();
        }
        catch (IOException exception)
        {
            if (isLenient)
            {
                LOG.debug("Stop reading object stream " + objstmObjNr
                        + " due to an exception", exception);
                // the error is handled in parseDictObjects
                return;
            }
            else
            {
                throw exception;
            }
        }
        // register all objects which are referenced to be contained in object stream
        for (COSObject next : parser.getObjects())
        {
            COSObjectKey stmObjKey = new COSObjectKey(next);
            Long offset = document.getXrefTable().get(stmObjKey);
            // only accept objects the xref actually maps into this stream (offset == -objstmObjNr)
            if (offset != null && offset == -objstmObjNr)
            {
                COSObject stmObj = document.getObjectFromPool(stmObjKey);
                stmObj.setObject(next.getObject());
            }
        }
    }
}

/**
 * Returns length value referred to or defined in given object.
 */
private COSNumber getLength(final COSBase lengthBaseObj, final COSName streamType)
        throws IOException
{
    if (lengthBaseObj == null)
    {
        return null;
    }
    COSNumber retVal = null;
    // maybe length was given directly
    if (lengthBaseObj instanceof COSNumber)
    {
        retVal = (COSNumber) lengthBaseObj;
    }
    // length in referenced object
    else if (lengthBaseObj instanceof COSObject)
    {
        COSObject lengthObj = (COSObject) lengthBaseObj;
        if (lengthObj.getObject() == null)
        {
            // not read so far, keep current stream position
            final long curFileOffset = source.getPosition();
            boolean isObjectStream = COSName.OBJ_STM.equals(streamType);
            parseObjectDynamically(lengthObj, isObjectStream);
            // reset current stream position
            source.seek(curFileOffset);
            if (lengthObj.getObject() == null)
            {
                throw new IOException("Length object content was not read.");
            }
        }
        if (!(lengthObj.getObject() instanceof COSNumber))
        {
            throw new IOException("Wrong type of referenced length object " + lengthObj + ": "
                    + lengthObj.getObject().getClass().getSimpleName());
        }
        retVal = (COSNumber) lengthObj.getObject();
    }
    else
    {
        throw new IOException("Wrong type of length object: "
                + lengthBaseObj.getClass().getSimpleName());
    }
    return retVal;
}

// chunk size for copying stream data with a known /Length
private static final int STREAMCOPYBUFLEN = 8192;
private final byte[] streamCopyBuf = new byte[STREAMCOPYBUFLEN];

/**
 * This will read a COSStream from the input stream using length attribute within dictionary. If
 * length attribute is a indirect reference it is first resolved to get the stream length. This
 * means we copy stream data without testing for 'endstream' or 'endobj' and thus it is no
 * problem if these keywords occur within stream. We require 'endstream' to be found after
 * stream data is read.
 *
 * @param dic dictionary that goes with this stream.
 *
 * @return parsed pdf stream.
 *
 * @throws IOException if an error occurred reading the stream, like problems with reading
 * length attribute, stream does not end with 'endstream' after data read, stream too short etc.
 */
protected COSStream parseCOSStream(COSDictionary dic) throws IOException
{
    COSStream stream = document.createCOSStream(dic);
    // read 'stream'; this was already tested in parseObjectsDynamically()
    readString();
    skipWhiteSpaces();
    /*
     * This needs to be dic.getItem because when we are parsing, the underlying object might still be null.
     */
    COSNumber streamLengthObj = getLength(dic.getItem(COSName.LENGTH),
            dic.getCOSName(COSName.TYPE));
    if (streamLengthObj == null)
    {
        if (isLenient)
        {
            LOG.warn("The stream doesn't provide any stream length, using fallback readUntilEnd, at offset "
                    + source.getPosition());
        }
        else
        {
            throw new IOException("Missing length for stream.");
        }
    }
    // get output stream to copy data to
    try (OutputStream out = stream.createRawOutputStream())
    {
        // use the declared /Length only if it is plausible; otherwise scan for 'endstream'
        if (streamLengthObj != null && validateStreamLength(streamLengthObj.longValue()))
        {
            readValidStream(out, streamLengthObj);
        }
        else
        {
            readUntilEndStream(new EndstreamOutputStream(out));
        }
    }
    String endStream = readString();
    if (endStream.equals("endobj") && isLenient)
    {
        LOG.warn("stream ends with 'endobj' instead of 'endstream' at offset "
                + source.getPosition());
        // avoid follow-up warning about missing endobj
        source.rewind(ENDOBJ.length);
    }
    else if (endStream.length() > 9 && isLenient
            && endStream.substring(0,9).equals(ENDSTREAM_STRING))
    {
        LOG.warn("stream ends with '" + endStream + "' instead of 'endstream' at offset "
                + source.getPosition());
        // unread the "extra" bytes
        source.rewind(endStream.substring(9).getBytes(ISO_8859_1).length);
    }
    else if (!endStream.equals(ENDSTREAM_STRING))
    {
        throw new IOException(
                "Error reading stream, expected='endstream' actual='"
                + endStream + "' at offset " + source.getPosition());
    }
    return stream;
}

/**
 * This method will read through the current stream object until
 * we find the keyword "endstream" meaning we're at the end of this
 * object.
 Some pdf files, however, forget to write some endstream tags
 * and just close off objects with an "endobj" tag so we have to handle
 * this case as well.
 *
 * This method is optimized using buffered IO and reduced number of
 * byte compare operations.
 *
 * @param out stream we write out to.
 *
 * @throws IOException if something went wrong
 */
private void readUntilEndStream(final OutputStream out) throws IOException
{
    int bufSize;
    int charMatchCount = 0;
    byte[] keyw = ENDSTREAM;
    // last character position of shortest keyword ('endobj')
    final int quickTestOffset = 5;
    // read next chunk into buffer; already matched chars are added to beginning of buffer
    while ((bufSize = source.read(strmBuf, charMatchCount, STRMBUFLEN - charMatchCount)) > 0)
    {
        bufSize += charMatchCount;
        int bIdx = charMatchCount;
        int quickTestIdx;
        // iterate over buffer, trying to find keyword match
        for (int maxQuicktestIdx = bufSize - quickTestOffset; bIdx < bufSize; bIdx++)
        {
            // reduce compare operations by first test last character we would have to
            // match if current one matches; if it is not a character from keywords
            // we can move behind the test character; this shortcut is inspired by the
            // Boyer-Moore string search algorithm and can reduce parsing time by approx. 20%
            quickTestIdx = bIdx + quickTestOffset;
            if (charMatchCount == 0 && quickTestIdx < maxQuicktestIdx)
            {
                final byte ch = strmBuf[quickTestIdx];
                if ((ch > 't') || (ch < 'a'))
                {
                    // last character we would have to match if current character would match
                    // is not a character from keywords -> jump behind and start over
                    bIdx = quickTestIdx;
                    continue;
                }
            }
            // could be negative - but we only compare to ASCII
            final byte ch = strmBuf[bIdx];
            if (ch == keyw[charMatchCount])
            {
                if (++charMatchCount == keyw.length)
                {
                    // match found
                    bIdx++;
                    break;
                }
            }
            else
            {
                if ((charMatchCount == 3) && (ch == ENDOBJ[charMatchCount]))
                {
                    // maybe ENDSTREAM is missing but we could have ENDOBJ
                    // ('end' is a common prefix of both keywords)
                    keyw = ENDOBJ;
                    charMatchCount++;
                }
                else
                {
                    // no match; incrementing match start by 1 would be dumb since we already know
                    // matched chars depending on current char read we may already have beginning
                    // of a new match: 'e': first char matched; 'n': if we are at match position
                    // idx 7 we already read 'e' thus 2 chars matched for each other char we have
                    // to start matching first keyword char beginning with next read position
                    charMatchCount = (ch == E) ? 1 : ((ch == N) && (charMatchCount == 7)) ? 2 : 0;
                    // search again for 'endstream'
                    keyw = ENDSTREAM;
                }
            }
        }
        int contentBytes = Math.max(0, bIdx - charMatchCount);
        // write buffer content until first matched char to output stream
        if (contentBytes > 0)
        {
            out.write(strmBuf, 0, contentBytes);
        }
        if (charMatchCount == keyw.length)
        {
            // keyword matched; unread matched keyword (endstream/endobj) and following buffered content
            source.rewind(bufSize - contentBytes);
            break;
        }
        else
        {
            // copy matched chars at start of buffer
            System.arraycopy(keyw, 0, strmBuf, 0, charMatchCount);
        }
    }
    // this writes a lonely CR or drops trailing CR LF and LF
    out.flush();
}

/**
 * Copies exactly the declared number of stream bytes from the source to the output.
 *
 * @param out destination for the raw stream data
 * @param streamLengthObj the already validated /Length value
 * @throws IOException if the source runs out of bytes before the declared length is read
 */
private void readValidStream(OutputStream out, COSNumber streamLengthObj) throws IOException
{
    long remainBytes = streamLengthObj.longValue();
    while (remainBytes > 0)
    {
        final int chunk = (remainBytes > STREAMCOPYBUFLEN) ? STREAMCOPYBUFLEN
                : (int) remainBytes;
        final int readBytes = source.read(streamCopyBuf, 0, chunk);
        if (readBytes <= 0)
        {
            // shouldn't happen, the stream length has already been validated
            throw new IOException("read error at offset " + source.getPosition()
                    + ": expected " + chunk + " bytes, but read() returns " + readBytes);
        }
        out.write(streamCopyBuf, 0, readBytes);
        remainBytes -= readBytes;
    }
}

/**
 * Checks whether the declared stream length is plausible: the resulting end offset must lie
 * within the file and be followed by the 'endstream' keyword. Restores the source position.
 *
 * @param streamLength declared /Length value
 * @return true if the declared length can be trusted
 * @throws IOException if seeking/reading fails
 */
private boolean validateStreamLength(long streamLength) throws IOException
{
    boolean streamLengthIsValid = true;
    long originOffset = source.getPosition();
    long expectedEndOfStream = originOffset + streamLength;
    if (expectedEndOfStream > fileLen)
    {
        streamLengthIsValid = false;
        LOG.warn("The end of the stream is out of range, using workaround to read the stream, "
                + "stream start position: " + originOffset + ", length: " + streamLength
                + ", expected end position: " + expectedEndOfStream);
    }
    else
    {
        source.seek(expectedEndOfStream);
        skipSpaces();
        if (!isString(ENDSTREAM))
        {
            streamLengthIsValid = false;
            LOG.warn("The end of the stream doesn't point to the correct offset, using workaround to read the stream, "
                    + "stream start position: " + originOffset + ", length: " + streamLength
                    + ", expected end position: " + expectedEndOfStream);
        }
        source.seek(originOffset);
    }
    return streamLengthIsValid;
}

/**
 * Check if the cross reference table/stream can be found at the current offset.
 *
 * @param startXRefOffset the offset to check
 * @return the revised offset, or -1 if no valid offset could be determined
 * @throws IOException if seeking/reading fails
 */
private long checkXRefOffset(long startXRefOffset) throws IOException
{
    // repair mode isn't available in non-lenient mode
    if (!isLenient)
    {
        return startXRefOffset;
    }
    source.seek(startXRefOffset);
    skipSpaces();
    if (source.peek() == X && isString(XREF_TABLE))
    {
        return startXRefOffset;
    }
    if (startXRefOffset > 0)
    {
        if (checkXRefStreamOffset(startXRefOffset))
        {
            return startXRefOffset;
        }
        else
        {
            return calculateXRefFixedOffset(startXRefOffset);
        }
    }
    // can't find a valid offset
    return -1;
}

/**
 * Check if the cross reference stream can be found at the current offset.
 *
 * @param startXRefOffset the expected start offset of the XRef stream
 * @return true if a valid XRef stream object starts at the given offset
 * @throws IOException if something went wrong
 */
private boolean checkXRefStreamOffset(long startXRefOffset) throws IOException
{
    // repair mode isn't available in non-lenient mode
    if (!isLenient || startXRefOffset == 0)
    {
        return true;
    }
    // seek to offset-1
    source.seek(startXRefOffset - 1);
    int nextValue = source.read();
    // the first character has to be a whitespace, and then a digit
    if (isWhitespace(nextValue))
    {
        skipSpaces();
        if (isDigit())
        {
            try
            {
                // it's a XRef stream
                readObjectNumber();
                readGenerationNumber();
                readExpectedString(OBJ_MARKER, true);
                // check the dictionary to avoid false positives
                COSDictionary dict = parseCOSDictionary();
                source.seek(startXRefOffset);
                if ("XRef".equals(dict.getNameAsString(COSName.TYPE)))
                {
                    return true;
                }
            }
            catch (IOException exception)
            {
                // there wasn't an object of a xref stream
                source.seek(startXRefOffset);
            }
        }
    }
    return false;
}

/**
 * Try to find a fixed offset for the given xref table/stream.
* * @param objectOffset the given offset where to look at * @return the fixed offset * * @throws IOException if something went wrong */ private long calculateXRefFixedOffset(long objectOffset) throws IOException { if (objectOffset < 0) { LOG.error("Invalid object offset " + objectOffset + " when searching for a xref table/stream"); return 0; } // start a brute force search for all xref tables and try to find the offset we are looking for long newOffset = bfSearchForXRef(objectOffset); if (newOffset > -1) { LOG.debug("Fixed reference for xref table/stream " + objectOffset + " -> " + newOffset); return newOffset; } LOG.error("Can't find the object xref table/stream at offset " + objectOffset); return 0; } private boolean validateXrefOffsets(Map<COSObjectKey, Long> xrefOffset) throws IOException { if (xrefOffset == null) { return true; } for (Entry<COSObjectKey, Long> objectEntry : xrefOffset.entrySet()) { COSObjectKey objectKey = objectEntry.getKey(); Long objectOffset = objectEntry.getValue(); // a negative offset number represents a object number itself // see type 2 entry in xref stream if (objectOffset != null && objectOffset >= 0 && !checkObjectKeys(objectKey, objectOffset)) { LOG.debug("Stop checking xref offsets as at least one (" + objectKey + ") couldn't be dereferenced"); return false; } } return true; } /** * Check the XRef table by dereferencing all objects and fixing the offset if necessary. * * @throws IOException if something went wrong. 
*/ private void checkXrefOffsets() throws IOException { // repair mode isn't available in non-lenient mode if (!isLenient) { return; } Map<COSObjectKey, Long> xrefOffset = xrefTrailerResolver.getXrefTable(); if (!validateXrefOffsets(xrefOffset)) { Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets(); if (!bfCOSObjectKeyOffsets.isEmpty()) { List<COSObjectKey> objStreams = new ArrayList<>(); // find all object streams for (Entry<COSObjectKey, Long> entry : xrefOffset.entrySet()) { Long offset = entry.getValue(); if (offset != null && offset < 0) { COSObjectKey objStream = new COSObjectKey(-offset, 0); if (!objStreams.contains(objStream)) { objStreams.add(new COSObjectKey(-offset, 0)); } } } // remove all found object streams if (!objStreams.isEmpty()) { for (COSObjectKey key : objStreams) { if (bfCOSObjectKeyOffsets.containsKey(key)) { // remove all parsed objects which are part of an object stream Set<Long> objects = xrefTrailerResolver .getContainedObjectNumbers((int) (key.getNumber())); for (Long objNr : objects) { COSObjectKey streamObjectKey = new COSObjectKey(objNr, 0); Long streamObjectOffset = bfCOSObjectKeyOffsets .get(streamObjectKey); if (streamObjectOffset != null && streamObjectOffset > 0) { bfCOSObjectKeyOffsets.remove(streamObjectKey); } } } else { // remove all objects which are part of an object stream which wasn't found Set<Long> objects = xrefTrailerResolver .getContainedObjectNumbers((int) (key.getNumber())); for (Long objNr : objects) { xrefOffset.remove(new COSObjectKey(objNr, 0)); } } } } LOG.debug("Replaced read xref table with the results of a brute force search"); xrefOffset.putAll(bfCOSObjectKeyOffsets); } } } /** * Check if the given object can be found at the given offset. 
* * @param objectKey the object we are looking for * @param offset the offset where to look * @return returns true if the given object can be dereferenced at the given offset * @throws IOException if something went wrong */ private boolean checkObjectKeys(COSObjectKey objectKey, long offset) throws IOException { // there can't be any object at the very beginning of a pdf if (offset < MINIMUM_SEARCH_OFFSET) { return false; } long objectNr = objectKey.getNumber(); int objectGen = objectKey.getGeneration(); long originOffset = source.getPosition(); String objectString = createObjectString(objectNr, objectGen); try { source.seek(offset); if (isString(objectString.getBytes(ISO_8859_1))) { // everything is ok, return origin object key source.seek(originOffset); return true; } } catch (IOException exception) { // Swallow the exception, obviously there isn't any valid object number } finally { source.seek(originOffset); } // no valid object number found return false; } /** * Create a string for the given object id. * * @param objectID the object id * @param genID the generation id * @return the generated string */ private String createObjectString(long objectID, int genID) { return Long.toString(objectID) + " " + Integer.toString(genID) + " obj"; } private Map<COSObjectKey, Long> getBFCOSObjectOffsets() throws IOException { if (bfSearchCOSObjectKeyOffsets == null) { bfSearchForObjects(); } return bfSearchCOSObjectKeyOffsets; } /** * Brute force search for every object in the pdf. 
 *
 * @throws IOException if something went wrong
 */
private void bfSearchForObjects() throws IOException
{
    bfSearchForLastEOFMarker();
    bfSearchCOSObjectKeyOffsets = new HashMap<>();
    long originOffset = source.getPosition();
    long currentOffset = MINIMUM_SEARCH_OFFSET;
    long lastObjectId = Long.MIN_VALUE;
    int lastGenID = Integer.MIN_VALUE;
    long lastObjOffset = Long.MIN_VALUE;
    char[] objString = " obj".toCharArray();
    char[] endobjString = "endobj".toCharArray();
    boolean endobjFound = false;
    do
    {
        source.seek(currentOffset);
        if (isString(objString))
        {
            // found " obj" -> walk backwards over "<nr> <gen>" to locate the header start
            long tempOffset = currentOffset - 1;
            source.seek(tempOffset);
            int genID = source.peek();
            // is the next char a digit?
            if (isDigit(genID))
            {
                // convert ASCII digit to its numeric value
                genID -= 48;
                tempOffset--;
                source.seek(tempOffset);
                if (isSpace())
                {
                    while (tempOffset > MINIMUM_SEARCH_OFFSET && isSpace())
                    {
                        source.seek(--tempOffset);
                    }
                    boolean objectIDFound = false;
                    while (tempOffset > MINIMUM_SEARCH_OFFSET && isDigit())
                    {
                        source.seek(--tempOffset);
                        objectIDFound = true;
                    }
                    if (objectIDFound)
                    {
                        source.read();
                        long objectId = readObjectNumber();
                        if (lastObjOffset > 0)
                        {
                            // add the former object ID only if there was a subsequent object ID
                            bfSearchCOSObjectKeyOffsets.put(
                                    new COSObjectKey(lastObjectId, lastGenID), lastObjOffset);
                        }
                        lastObjectId = objectId;
                        lastGenID = genID;
                        lastObjOffset = tempOffset + 1;
                        currentOffset += objString.length - 1;
                        endobjFound = false;
                    }
                }
            }
        }
        else if (isString(endobjString))
        {
            endobjFound = true;
            currentOffset += endobjString.length - 1;
        }
        currentOffset++;
    } while (currentOffset < lastEOFMarker && !source.isEOF());
    if ((lastEOFMarker < Long.MAX_VALUE || endobjFound) && lastObjOffset > 0)
    {
        // if the pdf wasn't cut off in the middle or if the last object ends with a "endobj" marker
        // the last object id has to be added here so that it can't get lost as there isn't any subsequent object id
        bfSearchCOSObjectKeyOffsets.put(new COSObjectKey(lastObjectId, lastGenID),
                lastObjOffset);
    }
    bfSearchForObjStreams();
    // reestablish origin position
    source.seek(originOffset);
}

/**
 * Search for the offset of the given xref table/stream among those found by a brute force search.
 *
 * @param xrefOffset the broken offset to find a replacement for
 * @return the offset of the xref entry
 * @throws IOException if something went wrong
 */
private long bfSearchForXRef(long xrefOffset) throws IOException
{
    long newOffset = -1;
    long newOffsetTable = -1;
    long newOffsetStream = -1;
    bfSearchForXRefTables();
    bfSearchForXRefStreams();
    if (bfSearchXRefTablesOffsets != null)
    {
        // TODO to be optimized, this won't work in every case
        newOffsetTable = searchNearestValue(bfSearchXRefTablesOffsets, xrefOffset);
    }
    if (bfSearchXRefStreamsOffsets != null)
    {
        // TODO to be optimized, this won't work in every case
        newOffsetStream = searchNearestValue(bfSearchXRefStreamsOffsets, xrefOffset);
    }
    // choose the nearest value; a consumed candidate is removed so it isn't reused
    if (newOffsetTable > -1 && newOffsetStream > -1)
    {
        long differenceTable = xrefOffset - newOffsetTable;
        long differenceStream = xrefOffset - newOffsetStream;
        if (Math.abs(differenceTable) > Math.abs(differenceStream))
        {
            newOffset = newOffsetStream;
            bfSearchXRefStreamsOffsets.remove(newOffsetStream);
        }
        else
        {
            newOffset = newOffsetTable;
            bfSearchXRefTablesOffsets.remove(newOffsetTable);
        }
    }
    else if (newOffsetTable > -1)
    {
        newOffset = newOffsetTable;
        bfSearchXRefTablesOffsets.remove(newOffsetTable);
    }
    else if (newOffsetStream > -1)
    {
        newOffset = newOffsetStream;
        bfSearchXRefStreamsOffsets.remove(newOffsetStream);
    }
    return newOffset;
}

/**
 * Returns the value from the given list that is nearest to the given offset,
 * or -1 if the list is empty.
 *
 * @param values candidate offsets
 * @param offset the reference offset
 * @return the nearest candidate or -1
 */
private long searchNearestValue(List<Long> values, long offset)
{
    long newValue = -1;
    Long currentDifference = null;
    int currentOffsetIndex = -1;
    int numberOfOffsets = values.size();
    // find the nearest value
    for (int i = 0; i < numberOfOffsets; i++)
    {
        long newDifference = offset - values.get(i);
        // find the nearest offset
        if (currentDifference == null
                || (Math.abs(currentDifference) > Math.abs(newDifference)))
        {
            currentDifference = newDifference;
            currentOffsetIndex = i;
        }
    }
    if (currentOffsetIndex > -1)
    {
        newValue = values.get(currentOffsetIndex);
    }
    return newValue;
}

/**
 * Brute force search for the last EOF marker.
 *
 * @throws IOException if something went wrong
 */
private void bfSearchForLastEOFMarker() throws IOException
{
    if (lastEOFMarker == null)
    {
        long originOffset = source.getPosition();
        source.seek(MINIMUM_SEARCH_OFFSET);
        while (!source.isEOF())
        {
            // search for EOF marker
            if (isString(EOF_MARKER))
            {
                long tempMarker = source.getPosition();
                // skip past "%%EOF" (5 bytes) before probing the following content
                source.seek(tempMarker + 5);
                try
                {
                    // check if the following data is some valid pdf content
                    // which most likely indicates that the pdf is linearized,
                    // updated or just cut off somewhere in the middle
                    skipSpaces();
                    if (!isString(XREF_TABLE))
                    {
                        readObjectNumber();
                        readGenerationNumber();
                    }
                }
                catch (IOException exception)
                {
                    // save the EOF marker as the following data is most likely some garbage
                    lastEOFMarker = tempMarker;
                }
            }
            source.read();
        }
        source.seek(originOffset);
        // no EOF marker found
        if (lastEOFMarker == null)
        {
            lastEOFMarker = Long.MAX_VALUE;
        }
    }
}

/**
 * Brute force search for all object streams.
 *
 * @throws IOException if something went wrong
 */
private void bfSearchForObjStreams() throws IOException
{
    HashMap<Long, COSObjectKey> bfSearchObjStreamsOffsets = new HashMap<>();
    long originOffset = source.getPosition();
    source.seek(MINIMUM_SEARCH_OFFSET);
    char[] string = " obj".toCharArray();
    while (!source.isEOF())
    {
        // search for the /ObjStm marker
        if (isString(OBJ_STREAM))
        {
            long currentPosition = source.getPosition();
            // search backwards for the beginning of the object
            long newOffset = -1;
            COSObjectKey streamObjectKey = null;
            boolean objFound = false;
            for (int i = 1; i < 40 && !objFound; i++)
            {
                // step back in 10-byte windows, up to ~400 bytes before the marker
                long currentOffset = currentPosition - (i * 10);
                if (currentOffset > 0)
                {
                    source.seek(currentOffset);
                    for (int j = 0; j < 10; j++)
                    {
                        if (isString(string))
                        {
                            long tempOffset = currentOffset - 1;
                            source.seek(tempOffset);
                            int genID = source.peek();
                            // is the next char a digit?
                            if (isDigit(genID))
                            {
                                tempOffset--;
                                source.seek(tempOffset);
                                if (isSpace())
                                {
                                    int length = 0;
                                    source.seek(--tempOffset);
                                    while (tempOffset > MINIMUM_SEARCH_OFFSET && isDigit())
                                    {
                                        source.seek(--tempOffset);
                                        length++;
                                    }
                                    if (length > 0)
                                    {
                                        source.read();
                                        newOffset = source.getPosition();
                                        long objNumber = readObjectNumber();
                                        int genNumber = readGenerationNumber();
                                        streamObjectKey = new COSObjectKey(objNumber,
                                                genNumber);
                                        bfSearchObjStreamsOffsets.put(newOffset,
                                                streamObjectKey);
                                    }
                                }
                            }
                            LOG.debug("Dictionary start for object stream -> " + newOffset);
                            objFound = true;
                            break;
                        }
                        else
                        {
                            currentOffset++;
                            source.read();
                        }
                    }
                }
            }
            source.seek(currentPosition + OBJ_STREAM.length);
        }
        source.read();
    }
    // add all found compressed objects to the brute force search result
    for (Long offset : bfSearchObjStreamsOffsets.keySet())
    {
        // NOTE(review): get() may return null here if bfSearchForObjects didn't record this
        // stream object's key; unboxing into 'long' would then throw a NullPointerException.
        // Consider a null check — TODO confirm whether the key is always present.
        long bfOffset = bfSearchCOSObjectKeyOffsets.get(bfSearchObjStreamsOffsets.get(offset));
        // check if the object was overwritten
        if (offset == bfOffset)
        {
            source.seek(offset);
            long stmObjNumber = readObjectNumber();
            readGenerationNumber();
            readExpectedString(OBJ_MARKER, true);
            COSDictionary dict = parseCOSDictionary();
            int offsetFirstStream = dict.getInt(COSName.FIRST);
            int nrOfObjects = dict.getInt(COSName.N);
            COSStream stream = parseCOSStream(dict);
            COSInputStream is = stream.createInputStream();
            byte[] numbersBytes = new byte[offsetFirstStream];
            // NOTE(review): the return value of read() is ignored; a short read would leave
            // trailing zero bytes in numbersBytes — TODO confirm the stream always delivers
            // offsetFirstStream bytes here.
            is.read(numbersBytes);
            is.close();
            stream.close();
            int start = 0;
            // skip spaces
            while (numbersBytes[start] == 32)
            {
                start++;
            }
            String numbersStr = new String(numbersBytes, start,
                    numbersBytes.length - start, "ISO-8859-1");
            // the header is a list of "<objNr> <byteOffset>" pairs; only the numbers are used
            String[] numbers = numbersStr.split(" ");
            for (int i = 0; i < nrOfObjects; i++)
            {
                long objNumber = Long.parseLong(numbers[i * 2]);
                COSObjectKey objKey = new COSObjectKey(objNumber, 0);
                Long existingOffset = bfSearchCOSObjectKeyOffsets.get(objKey);
                if (existingOffset == null || offset > existingOffset)
                {
                    // negative value marks the object as compressed within stream stmObjNumber
                    bfSearchCOSObjectKeyOffsets.put(objKey, -stmObjNumber);
                }
            }
        }
    }
    source.seek(originOffset);
}

/**
 *
Brute force search for all xref entries (tables). * * @throws IOException if something went wrong */ private void bfSearchForXRefTables() throws IOException { if (bfSearchXRefTablesOffsets == null) { // a pdf may contain more than one xref entry bfSearchXRefTablesOffsets = new Vector<>(); long originOffset = source.getPosition(); source.seek(MINIMUM_SEARCH_OFFSET); // search for xref tables while (!source.isEOF()) { if (isString(XREF_TABLE)) { long newOffset = source.getPosition(); source.seek(newOffset - 1); // ensure that we don't read "startxref" instead of "xref" if (isWhitespace()) { bfSearchXRefTablesOffsets.add(newOffset); } source.seek(newOffset + 4); } source.read(); } source.seek(originOffset); } } /** * Brute force search for all /XRef entries (streams). * * @throws IOException if something went wrong */ private void bfSearchForXRefStreams() throws IOException { if (bfSearchXRefStreamsOffsets == null) { // a pdf may contain more than one /XRef entry bfSearchXRefStreamsOffsets = new Vector<>(); long originOffset = source.getPosition(); source.seek(MINIMUM_SEARCH_OFFSET); // search for XRef streams String objString = " obj"; char[] string = objString.toCharArray(); while (!source.isEOF()) { if (isString(XREF_STREAM)) { // search backwards for the beginning of the stream long newOffset = -1; long xrefOffset = source.getPosition(); boolean objFound = false; for (int i = 1; i < 40 && !objFound; i++) { long currentOffset = xrefOffset - (i * 10); if (currentOffset > 0) { source.seek(currentOffset); for (int j = 0; j < 10; j++) { if (isString(string)) { long tempOffset = currentOffset - 1; source.seek(tempOffset); int genID = source.peek(); // is the next char a digit? 
if (isDigit(genID)) { tempOffset--; source.seek(tempOffset); if (isSpace()) { int length = 0; source.seek(--tempOffset); while (tempOffset > MINIMUM_SEARCH_OFFSET && isDigit()) { source.seek(--tempOffset); length++; } if (length > 0) { source.read(); newOffset = source.getPosition(); } } } LOG.debug("Fixed reference for xref stream " + xrefOffset + " -> " + newOffset); objFound = true; break; } else { currentOffset++; source.read(); } } } } if (newOffset > -1) { bfSearchXRefStreamsOffsets.add(newOffset); } source.seek(xrefOffset + 5); } source.read(); } source.seek(originOffset); } } /** * Rebuild the trailer dictionary if startxref can't be found. * * @return the rebuild trailer dictionary * * @throws IOException if something went wrong */ private final COSDictionary rebuildTrailer() throws IOException { COSDictionary trailer = null; Map<COSObjectKey, Long> bfCOSObjectKeyOffsets = getBFCOSObjectOffsets(); // reset trailer resolver xrefTrailerResolver.reset(); // use the found objects to rebuild the trailer resolver xrefTrailerResolver.nextXrefObj(0, XRefType.TABLE); for (Entry<COSObjectKey, Long> entry : bfCOSObjectKeyOffsets.entrySet()) { xrefTrailerResolver.setXRef(entry.getKey(), entry.getValue()); } xrefTrailerResolver.setStartxref(0); trailer = xrefTrailerResolver.getTrailer(); getDocument().setTrailer(trailer); // search for the different parts of the trailer dictionary for (Entry<COSObjectKey, Long> entry : bfCOSObjectKeyOffsets.entrySet()) { Long offset = entry.getValue(); // skip compressed objects if (offset < 0) { continue; } source.seek(offset); readObjectNumber(); readGenerationNumber(); readExpectedString(OBJ_MARKER, true); try { if (source.peek() != '<') { continue; } COSDictionary dictionary = parseCOSDictionary(); // document catalog if (isCatalog(dictionary)) { trailer.setItem(COSName.ROOT, document.getObjectFromPool(entry.getKey())); } // info dictionary else if (!dictionary.containsKey(COSName.PARENT) && 
(dictionary.containsKey(COSName.MOD_DATE) || dictionary.containsKey(COSName.TITLE) || dictionary.containsKey(COSName.AUTHOR) || dictionary.containsKey(COSName.SUBJECT) || dictionary.containsKey(COSName.KEYWORDS) || dictionary.containsKey(COSName.CREATOR) || dictionary.containsKey(COSName.PRODUCER) || dictionary.containsKey(COSName.CREATION_DATE))) { trailer.setItem(COSName.INFO, document.getObjectFromPool(entry.getKey())); } // encryption dictionary, if existing, is lost // We can't run "Algorithm 2" from PDF specification because of missing ID } catch (IOException exception) { LOG.debug("Skipped object " + entry.getKey() + ", either it's corrupt or not a dictionary"); } } return trailer; } /** * Tell if the dictionary is a PDF catalog. Override this for an FDF catalog. * * @param dictionary * @return */ protected boolean isCatalog(COSDictionary dictionary) { return COSName.CATALOG.equals(dictionary.getCOSName(COSName.TYPE)); } /** * This will parse the startxref section from the stream. * The startxref value is ignored. * * @return the startxref value or -1 on parsing error * @throws IOException If an IO error occurs. */ private long parseStartXref() throws IOException { long startXref = -1; if (isString(STARTXREF)) { readString(); skipSpaces(); // This integer is the byte offset of the first object referenced by the xref or xref stream startXref = readLong(); } return startXref; } /** * Checks if the given string can be found at the current offset. 
* * @param string the bytes of the string to look for * @return true if the bytes are in place, false if not * @throws IOException if something went wrong */ private boolean isString(byte[] string) throws IOException { boolean bytesMatching = false; if (source.peek() == string[0]) { int length = string.length; byte[] bytesRead = new byte[length]; int numberOfBytes = source.read(bytesRead, 0, length); while (numberOfBytes < length) { int readMore = source.read(bytesRead, numberOfBytes, length - numberOfBytes); if (readMore < 0) { break; } numberOfBytes += readMore; } bytesMatching = Arrays.equals(string, bytesRead); source.rewind(numberOfBytes); } return bytesMatching; } /** * Checks if the given string can be found at the current offset. * * @param string the bytes of the string to look for * @return true if the bytes are in place, false if not * @throws IOException if something went wrong */ private boolean isString(char[] string) throws IOException { boolean bytesMatching = true; long originOffset = source.getPosition(); for (char c : string) { if (source.read() != c) { bytesMatching = false; break; } } source.seek(originOffset); return bytesMatching; } /** * This will parse the trailer from the stream and add it to the state. * * @return false on parsing error * @throws IOException If an IO error occurs. */ private boolean parseTrailer() throws IOException { // parse the last trailer. 
trailerOffset = source.getPosition(); // PDFBOX-1739 skip extra xref entries in RegisSTAR documents if (isLenient) { int nextCharacter = source.peek(); while (nextCharacter != 't' && isDigit(nextCharacter)) { if (source.getPosition() == trailerOffset) { // warn only the first time LOG.warn("Expected trailer object at position " + trailerOffset + ", keep trying"); } readLine(); nextCharacter = source.peek(); } } if(source.peek() != 't') { return false; } //read "trailer" long currentOffset = source.getPosition(); String nextLine = readLine(); if( !nextLine.trim().equals( "trailer" ) ) { // in some cases the EOL is missing and the trailer immediately // continues with "<<" or with a blank character // even if this does not comply with PDF reference we want to support as many PDFs as possible // Acrobat reader can also deal with this. if (nextLine.startsWith("trailer")) { // we can't just unread a portion of the read data as we don't know if the EOL consist of 1 or 2 bytes int len = "trailer".length(); // jump back right after "trailer" source.seek(currentOffset + len); } else { return false; } } // in some cases the EOL is missing and the trailer continues with " <<" // even if this does not comply with PDF reference we want to support as many PDFs as possible // Acrobat reader can also deal with this. skipSpaces(); COSDictionary parsedTrailer = parseCOSDictionary(); xrefTrailerResolver.setTrailer( parsedTrailer ); skipSpaces(); return true; } /** * Parse the header of a pdf. * * @return true if a PDF header was found * @throws IOException if something went wrong */ protected boolean parsePDFHeader() throws IOException { return parseHeader(PDF_HEADER, PDF_DEFAULT_VERSION); } /** * Parse the header of a fdf. 
* * @return true if a FDF header was found * @throws IOException if something went wrong */ protected boolean parseFDFHeader() throws IOException { return parseHeader(FDF_HEADER, FDF_DEFAULT_VERSION); } private boolean parseHeader(String headerMarker, String defaultVersion) throws IOException { // read first line String header = readLine(); // some pdf-documents are broken and the pdf-version is in one of the following lines if (!header.contains(headerMarker)) { header = readLine(); while (!header.contains(headerMarker)) { // if a line starts with a digit, it has to be the first one with data in it if ((header.length() > 0) && (Character.isDigit(header.charAt(0)))) { break; } header = readLine(); } } // nothing found if (!header.contains(headerMarker)) { source.seek(0); return false; } //sometimes there is some garbage in the header before the header //actually starts, so lets try to find the header first. int headerStart = header.indexOf( headerMarker ); // greater than zero because if it is zero then there is no point of trimming if ( headerStart > 0 ) { //trim off any leading characters header = header.substring( headerStart, header.length() ); } // This is used if there is garbage after the header on the same line if (header.startsWith(headerMarker) && !header.matches(headerMarker + "\\d.\\d")) { if (header.length() < headerMarker.length() + 3) { // No version number at all, set to 1.4 as default header = headerMarker + defaultVersion; LOG.debug("No version found, set to " + defaultVersion + " as default."); } else { String headerGarbage = header.substring(headerMarker.length() + 3, header.length()) + "\n"; header = header.substring(0, headerMarker.length() + 3); source.rewind(headerGarbage.getBytes(ISO_8859_1).length); } } float headerVersion = -1; try { String[] headerParts = header.split("-"); if (headerParts.length == 2) { headerVersion = Float.parseFloat(headerParts[1]); } } catch (NumberFormatException exception) { LOG.debug("Can't parse the header 
version.", exception); } if (headerVersion < 0) { if (isLenient) { headerVersion = 1.7f; } else { throw new IOException("Error getting header version: " + header); } } document.setVersion(headerVersion); // rewind source.seek(0); return true; } /** * This will parse the xref table from the stream and add it to the state * The XrefTable contents are ignored. * @param startByteOffset the offset to start at * @return false on parsing error * @throws IOException If an IO error occurs. */ protected boolean parseXrefTable(long startByteOffset) throws IOException { long xrefTableStartOffset = source.getPosition(); if(source.peek() != 'x') { return false; } String xref = readString(); if( !xref.trim().equals( "xref" ) ) { return false; } // check for trailer after xref String str = readString(); byte[] b = str.getBytes(ISO_8859_1); source.rewind(b.length); // signal start of new XRef xrefTrailerResolver.nextXrefObj( startByteOffset, XRefType.TABLE ); if (str.startsWith("trailer")) { LOG.warn("skipping empty xref table"); return false; } // Xref tables can have multiple sections. Each starts with a starting object id and a count. 
while(true) { String currentLine = readLine(); String[] splitString = currentLine.split("\\s"); if (splitString.length != 2) { LOG.warn("Unexpected XRefTable Entry: " + currentLine); break; } // first obj id long currObjID = Long.parseLong(splitString[0]); // the number of objects in the xref table int count = Integer.parseInt(splitString[1]); skipSpaces(); for(int i = 0; i < count; i++) { if(source.isEOF() || isEndOfName((char)source.peek())) { break; } if(source.peek() == 't') { break; } //Ignore table contents currentLine = readLine(); splitString = currentLine.split("\\s"); if (splitString.length < 3) { LOG.warn("invalid xref line: " + currentLine); break; } /* This supports the corrupt table as reported in * PDFBOX-474 (XXXX XXX XX n) */ if(splitString[splitString.length-1].equals("n")) { try { long currOffset = Long.parseLong(splitString[0]); if (currOffset >= xrefTableStartOffset && currOffset <= source.getPosition()) { // PDFBOX-3923: offset points inside this table - that can't be good // PDFBOX-3935: don't abort (rebuilding trailer would lose encryption // dictionary), just skip LOG.warn("XRefTable offset " + currOffset + " is within xref table (start offset: " + xrefTableStartOffset + ") for object " + currObjID); } else { int currGenID = Integer.parseInt(splitString[1]); COSObjectKey objKey = new COSObjectKey(currObjID, currGenID); xrefTrailerResolver.setXRef(objKey, currOffset); } } catch(NumberFormatException e) { throw new IOException(e); } } else if(!splitString[2].equals("f")) { throw new IOException("Corrupt XRefTable Entry - ObjID:" + currObjID); } currObjID++; skipSpaces(); } skipSpaces(); if (!isDigit()) { break; } } return true; } /** * Fills XRefTrailerResolver with data of given stream. * Stream must be of type XRef. 
* @param stream the stream to be read * @param objByteOffset the offset to start at * @param isStandalone should be set to true if the stream is not part of a hybrid xref table * @throws IOException if there is an error parsing the stream */ private void parseXrefStream(COSStream stream, long objByteOffset, boolean isStandalone) throws IOException { // the cross reference stream of a hybrid xref table will be added to the existing one // and we must not override the offset and the trailer if ( isStandalone ) { xrefTrailerResolver.nextXrefObj( objByteOffset, XRefType.STREAM ); xrefTrailerResolver.setTrailer( stream ); } PDFXrefStreamParser parser = new PDFXrefStreamParser( stream, document, xrefTrailerResolver ); parser.parse(); } /** * This will get the document that was parsed. parse() must be called before this is called. * When you are done with this document you must call close() on it to release * resources. * * @return The document that was parsed. * * @throws IOException If there is an error getting the document. */ public COSDocument getDocument() throws IOException { if( document == null ) { throw new IOException( "You must call parse() before calling getDocument()" ); } return document; } /** * Parse the values of the trailer dictionary and return the root object. * * @param trailer The trailer dictionary. * @return The parsed root object. * @throws IOException If an IO error occurs or if the root object is * missing in the trailer dictionary. 
*/ protected COSBase parseTrailerValuesDynamically(COSDictionary trailer) throws IOException { // PDFBOX-1557 - ensure that all COSObject are loaded in the trailer // PDFBOX-1606 - after securityHandler has been instantiated for (COSBase trailerEntry : trailer.getValues()) { if (trailerEntry instanceof COSObject) { COSObject tmpObj = (COSObject) trailerEntry; parseObjectDynamically(tmpObj, false); } } // parse catalog or root object COSObject root = (COSObject) trailer.getItem(COSName.ROOT); if (root == null) { throw new IOException("Missing root object specification in trailer."); } return parseObjectDynamically(root, false); } }
PDFBOX-3923, PDFBOX-3935: add comment about alternative to last change git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1810201 13f79535-47bb-0310-9956-ffa450edef68
pdfbox/src/main/java/org/apache/pdfbox/pdfparser/COSParser.java
PDFBOX-3923, PDFBOX-3935: add comment about alternative to last change
Java
apache-2.0
1721848df8cbc3fb7d1499d2d624da6e149e519a
0
iservport/helianto,iservport/helianto
/* Copyright 2005 I Serv Consultoria Empresarial Ltda. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.helianto.core.domain; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import javax.persistence.Column; import javax.persistence.DiscriminatorColumn; import javax.persistence.DiscriminatorType; import javax.persistence.DiscriminatorValue; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Inheritance; import javax.persistence.InheritanceType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import javax.persistence.UniqueConstraint; import javax.persistence.Version; import org.helianto.core.PropertyMappable; import org.helianto.core.def.ActivityState; import org.helianto.core.domain.type.RootEntity; import org.helianto.core.utils.StringListUtils; import org.helianto.user.domain.User; import org.helianto.user.domain.UserGroup; import org.springframework.format.annotation.DateTimeFormat; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; /** * * <p> * Domain object to 
represent the logical namespace of a business * organization or individual and provide for proper isolation to * other entities trying to access its related classes. * </p> * <p> * For example, if two equipment sets must be distinguished in * logical spaces to avoid identity collision, they * must be associated to different entities. This is also applicable for many * other domain classes, like accounts, statements, parts, processes, etc. * The <code>Entity</code> is the root for many of such objects and allow * for the desirable isolation between two or more organizations, or even * smaller units within one organization. In other words, an <code>Entity</code> * 'controls' a whole group of domain object instances. * </p> * <p> * A real world entity usually has many related properties, like * address or trade mark. An <code>Entity</code> here, though, is * designed not to hold much information, namely only an unique name. That makes * it flexible enough to be associated to virtually any real world * entity, even individuals. * </p> * <p> * A small footprint is also desirable for some serialization strategies * like Hibernate's (www.hibernate.org) non-lazy loading. 
* </p> * @author Mauricio Fernandes de Castro * * */ @javax.persistence.Entity @Table(name="core_entity", uniqueConstraints = {@UniqueConstraint(columnNames={"operatorId", "alias"})} ) @Inheritance(strategy = InheritanceType.SINGLE_TABLE) @DiscriminatorColumn( name="type", discriminatorType=DiscriminatorType.CHAR ) @DiscriminatorValue("0") public class Entity implements RootEntity , PropertyMappable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy=GenerationType.AUTO) private int id; @Version private int version; @JsonIgnore @JsonBackReference @ManyToOne @JoinColumn(name="operatorId", nullable=true) private Operator operator; @Column(length=20) private String alias = ""; @DateTimeFormat(style="S-") @Temporal(TemporalType.TIMESTAMP) private Date installDate; private char entityType = 'C'; @Column(length=128) private String nature = ""; @Column(length=128) private String customColors = ""; @Column(length=128) private String customStyle = ""; @Column(length=128) private String customProperties = ""; @Column(length=1024) private String summary = ""; @Transient private Identity manager; @Column(length=128) private String externalLogoUrl = ""; private char activityState = 'A'; @OneToMany(mappedBy="entity") private Set<UserGroup> users = new HashSet<UserGroup>(0); @JsonManagedReference @OneToMany(mappedBy="entity") private Set<PublicEntity> publicEntities = new HashSet<PublicEntity>(0); @Transient private List<UserGroup> userList; /** * Default constructor. */ public Entity() { super(); } /** * Operator constructor. * * @param operator */ public Entity(Operator operator) { this(); setOperator(operator); } /** * Key constructor. * * @param operator * @param alias */ public Entity(Operator operator, String alias) { this(operator); setAlias(alias); setInstallDate(new Date()); } /** * User constructor. 
* * @param user */ public Entity(User user) { this(user.getOperator()); setManager(user.getIdentity()); } public int getId() { return this.id; } public void setId(int id) { this.id = id; } /** * Version. */ public int getVersion() { return this.version; } public void setVersion(int version) { this.version = version; } /** * Operator, lazy loaded. */ public Operator getOperator() { return this.operator; } public void setOperator(Operator operator) { this.operator = operator; } public int getContextId() { if (getOperator()!=null) { return getOperator().getId(); } return 0; } public Locale getLocale() { // TODO create locale field. return Locale.getDefault(); } /** * Alias. */ public String getAlias() { return this.alias; } public void setAlias(String alias) { this.alias = alias; } /** * Date of installation. */ public Date getInstallDate() { return installDate; } public void setInstallDate(Date installDate) { this.installDate = installDate; } /** * <<Transient>> True if install date is not null. */ // @Transient public boolean isInstalled() { return getInstallDate()!=null; } public char getEntityType() { return entityType; } public void setEntityType(char entityType) { this.entityType = entityType; } /** * Entity nature. * * <p> * A list of comma separeted literals matching to public entity discriminators. The service layer * must control the life cycle of such public entities following the literals on this string.</p> */ public String getNature() { return nature; } public void setNature(String nature) { this.nature = nature; } /** * <<Transient>> Set nature if it does not exist. * * @param nature */ public void setNatureIfDoesNotExist(char nature) { if (getNature()==null) { setNature(Character.toString(nature)); } else if (getNature().indexOf(nature)==-1) { if (getNatureAsArray().length>0) { setNature(getNature().concat(",")); } setNature(getNature().concat(Character.toString(nature))); } } /** * <<Transient>> True if nature already exists. 
* * @param nature */ public boolean hasNature(char nature) { return (getNature()!=null && getNature().indexOf(nature)>=0); } /** * <<Transient>> Nature as array. */ public String[] getNatureAsArray() { return StringListUtils.stringToArray(getNature()); } public void setNatureAsArray(String[] natureArray) { setNature(StringListUtils.arrayToString(natureArray)); } /** * Custom colors. * * <p> * Up to 6 colors in the hex format #rrggbb,#rrggbb, etc. * </p> */ public String getCustomColors() { return customColors; } public void setCustomColors(String customColors) { this.customColors = customColors; } /** * <<Transient>> Colors as array. */ public String[] getCustomColorsAsArray() { return StringListUtils.stringToArray(getCustomColors()); } public void setCustomColorsAsArray(String[] customColorsArray) { setCustomColors(StringListUtils.arrayToString(customColorsArray)); } /** * Custom style. */ public String getCustomStyle() { return customStyle; } public void setCustomStyle(String customStyle) { this.customStyle = customStyle; } /** * Custom properties. */ public String getCustomProperties() { return customProperties; } public void setCustomProperties(String customProperties) { this.customProperties = customProperties; } /** * Summary. */ public String getSummary() { return summary; } public void setSummary(String summary) { this.summary = summary; } public Map<String, Object> getCustomPropertiesAsMap() { return StringListUtils.propertiesToMap(getCustomProperties()); } public void setPropertiesAsMap(Map<String, Object> propertyMap) { setCustomProperties(StringListUtils.mapToProperties(propertyMap)); } /** * <<Transient>> Convenient to hold the manager during installation * * <p> * Entity installation requires many steps. Please, check * service layer for installation procedures. * <p> */ public Identity getManager() { return manager; } public void setManager(Identity manager) { this.manager = manager; } /** * Link to an external logo (like http://mysite/img/log). 
*/ public String getExternalLogoUrl() { return externalLogoUrl; } public void setExternalLogoUrl(String externalLogoUrl) { this.externalLogoUrl = externalLogoUrl; } /** * Activity state. */ public char getActivityState() { return activityState; } public void setActivityState(char activityState) { this.activityState = activityState; } public void setActivityStateAsEnum(ActivityState activityState) { this.activityState = activityState.getValue(); } /** * User group set. */ public Set<UserGroup> getUsers() { return users; } public void setUsers(Set<UserGroup> users) { this.users = users; } /** * Public entity set. */ public Set<PublicEntity> getPublicEntities() { return publicEntities; } public void setPublicEntities(Set<PublicEntity> publicEntities) { this.publicEntities = publicEntities; } /** * <<Transient>> User list. */ public List<UserGroup> getUserList() { return userList; } public void setUserList(List<UserGroup> userList) { this.userList = userList; } /** * toString * @return String */ public String toString() { StringBuffer buffer = new StringBuffer(); buffer.append(getClass().getName()).append("@").append(Integer.toHexString(hashCode())).append(" ["); buffer.append("alias").append("='").append(getAlias()).append("' "); buffer.append("]"); return buffer.toString(); } /** * equals */ public boolean equals(Object other) { if ( (this == other ) ) return true; if ( (other == null ) ) return false; if ( !(other instanceof Entity) ) return false; Entity castOther = (Entity) other; return ((this.getOperator()==castOther.getOperator()) || ( this.getOperator()!=null && castOther.getOperator()!=null && this.getOperator().equals(castOther.getOperator()) )) && ((this.getAlias()==castOther.getAlias()) || ( this.getAlias()!=null && castOther.getAlias()!=null && this.getAlias().equals(castOther.getAlias()) )); } /** * hashCode */ public int hashCode() { int result = 17; result = 37 * result + ( getAlias() == null ? 0 : this.getAlias().hashCode() ); return result; } }
helianto-core/src/main/java/org/helianto/core/domain/Entity.java
/* Copyright 2005 I Serv Consultoria Empresarial Ltda. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.helianto.core.domain; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import javax.persistence.Column; import javax.persistence.DiscriminatorColumn; import javax.persistence.DiscriminatorType; import javax.persistence.DiscriminatorValue; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Inheritance; import javax.persistence.InheritanceType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import javax.persistence.UniqueConstraint; import javax.persistence.Version; import org.helianto.core.PropertyMappable; import org.helianto.core.def.ActivityState; import org.helianto.core.domain.type.RootEntity; import org.helianto.core.utils.StringListUtils; import org.helianto.user.domain.User; import org.helianto.user.domain.UserGroup; import org.springframework.format.annotation.DateTimeFormat; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; 
/** * * <p> * Domain object to represent the logical namespace of a business * organization or individual and provide for proper isolation to * other entities trying to access its related classes. * </p> * <p> * For example, if two equipment sets must be distinguished in * logical spaces to avoid identity collision, they * must be associated to different entities. This is also applicable for many * other domain classes, like accounts, statements, parts, processes, etc. * The <code>Entity</code> is the root for many of such objects and allow * for the desirable isolation between two or more organizations, or even * smaller units within one organization. In other words, an <code>Entity</code> * 'controls' a whole group of domain object instances. * </p> * <p> * A real world entity usually has many related properties, like * address or trade mark. An <code>Entity</code> here, though, is * designed not to hold much information, namely only an unique name. That makes * it flexible enough to be associated to virtually any real world * entity, even individuals. * </p> * <p> * A small footprint is also desirable for some serialization strategies * like Hibernate's (www.hibernate.org) non-lazy loading. 
* </p> * @author Mauricio Fernandes de Castro * * */ @javax.persistence.Entity @Table(name="core_entity", uniqueConstraints = {@UniqueConstraint(columnNames={"operatorId", "alias"})} ) @Inheritance(strategy = InheritanceType.SINGLE_TABLE) @DiscriminatorColumn( name="type", discriminatorType=DiscriminatorType.CHAR ) @DiscriminatorValue("0") public class Entity implements RootEntity , PropertyMappable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy=GenerationType.AUTO) private int id; @Version private int version; @JsonIgnore @JsonBackReference @ManyToOne @JoinColumn(name="operatorId", nullable=true) private Operator operator; @Column(length=20) private String alias = ""; @DateTimeFormat(style="S-") @Temporal(TemporalType.TIMESTAMP) private Date installDate; private char entityType = 'C'; @Column(length=128) private String nature = ""; @Column(length=128) private String customColors = ""; @Column(length=128) private String customStyle = ""; @Column(length=128) private String customProperties = ""; @Column(length=1024) private String summary = ""; @Transient private Identity manager; @Column(length=128) private String externalLogoUrl = ""; private char activityState = 'A'; @OneToMany(mappedBy="entity") private Set<UserGroup> users = new HashSet<UserGroup>(0); @JsonManagedReference @OneToMany(mappedBy="entity") private Set<PublicEntity> publicEntities = new HashSet<PublicEntity>(0); @Transient private List<UserGroup> userList; /** * Default constructor. */ public Entity() { super(); } /** * Operator constructor. * * @param operator */ public Entity(Operator operator) { this(); setOperator(operator); } /** * Key constructor. * * @param operator * @param alias */ public Entity(Operator operator, String alias) { this(operator); setAlias(alias); setInstallDate(new Date()); } /** * User constructor. 
* * @param user */ public Entity(User user) { this(user.getOperator()); setManager(user.getIdentity()); } public int getId() { return this.id; } public void setId(int id) { this.id = id; } /** * Version. */ public int getVersion() { return this.version; } public void setVersion(int version) { this.version = version; } /** * Operator, lazy loaded. */ public Operator getOperator() { return this.operator; } public void setOperator(Operator operator) { this.operator = operator; } public int getContextId() { if (getOperator()!=null) { return getOperator().getId(); } return 0; } public Locale getLocale() { // TODO create locale field. return Locale.getDefault(); } /** * Alias. */ public String getAlias() { return this.alias; } public void setAlias(String alias) { this.alias = alias; } /** * Date of installation. */ public Date getInstallDate() { return installDate; } public void setInstallDate(Date installDate) { this.installDate = installDate; } /** * <<Transient>> True if install date is not null. */ // @Transient public boolean isInstalled() { return getInstallDate()!=null; } public char getEntityType() { return entityType; } public void setEntityType(char entityType) { this.entityType = entityType; } /** * Entity nature. * * <p> * A list of comma separeted literals matching to public entity discriminators. The service layer * must control the life cycle of such public entities following the literals on this string.</p> */ public String getNature() { return nature; } public void setNature(String nature) { this.nature = nature; } /** * <<Transient>> Set nature if it does not exist. * * @param nature */ public void setNatureIfDoesNotExist(char nature) { if (getNature()==null) { setNature(Character.toString(nature)); } else if (getNature().indexOf(nature)==-1) { if (getNatureAsArray().length>0) { setNature(getNature().concat(",")); } setNature(getNature().concat(Character.toString(nature))); } } /** * <<Transient>> True if nature already exists. 
* * @param nature */ public boolean hasNature(char nature) { return (getNature()!=null && getNature().indexOf(nature)>=0); } /** * <<Transient>> Nature as array. */ public String[] getNatureAsArray() { return StringListUtils.stringToArray(getNature()); } public void setNatureAsArray(String[] natureArray) { setNature(StringListUtils.arrayToString(natureArray)); } /** * Custom colors. * * <p> * Up to 6 colors in the hex format #rrggbb,#rrggbb, etc. * </p> */ public String getCustomColors() { return customColors; } public void setCustomColors(String customColors) { this.customColors = customColors; } /** * <<Transient>> Colors as array. */ public String[] getCustomColorsAsArray() { return StringListUtils.stringToArray(getCustomColors()); } public void setCustomColorsAsArray(String[] customColorsArray) { setCustomColors(StringListUtils.arrayToString(customColorsArray)); } /** * Custom style. */ public String getCustomStyle() { return customStyle; } public void setCustomStyle(String customStyle) { this.customStyle = customStyle; } /** * Custom properties. */ public String getCustomProperties() { return customProperties; } public void setCustomProperties(String customProperties) { this.customProperties = customProperties; } /** * Summary. */ public String getSummary() { return summary; } public void setSummary(String summary) { this.summary = summary; } public Map<String, Object> getCustomPropertiesAsMap() { return StringListUtils.propertiesToMap(getCustomProperties()); } public void setPropertiesAsMap(Map<String, Object> propertyMap) { setCustomProperties(StringListUtils.mapToProperties(propertyMap)); } /** * <<Transient>> Convenient to hold the manager during installation * * <p> * Entity installation requires many steps. Please, check * service layer for installation procedures. * <p> */ public Identity getManager() { return manager; } public void setManager(Identity manager) { this.manager = manager; } /** * Link to an external logo (like http://mysite/img/log). 
*/ public String getExternalLogoUrl() { return externalLogoUrl; } public void setExternalLogoUrl(String externalLogoUrl) { this.externalLogoUrl = externalLogoUrl; } /** * Activity state. */ public char getActivityState() { return activityState; } public void setActivityState(char activityState) { this.activityState = activityState; } public void setActivityStateAsEnum(ActivityState activityState) { this.activityState = activityState.getValue(); } /** * User group set. */ public Set<UserGroup> getUsers() { return users; } public void setUsers(Set<UserGroup> users) { this.users = users; } /** * Public entity set. */ public Set<PublicEntity> getPublicEntities() { return publicEntities; } public void setPublicEntities(Set<PublicEntity> publicEntities) { this.publicEntities = publicEntities; } /** * <<Transient>> User list. */ public List<UserGroup> getUserList() { return userList; } public void setUserList(List<UserGroup> userList) { this.userList = userList; } /** * toString * @return String */ public String toString() { StringBuffer buffer = new StringBuffer(); buffer.append(getClass().getName()).append("@").append(Integer.toHexString(hashCode())).append(" ["); buffer.append("alias").append("='").append(getAlias()).append("' "); buffer.append("]"); return buffer.toString(); } /** * equals */ public boolean equals(Object other) { if ( (this == other ) ) return true; if ( (other == null ) ) return false; if ( !(other instanceof Entity) ) return false; Entity castOther = (Entity) other; return ((this.getOperator()==castOther.getOperator()) || ( this.getOperator()!=null && castOther.getOperator()!=null && this.getOperator().equals(castOther.getOperator()) )) && ((this.getAlias()==castOther.getAlias()) || ( this.getAlias()!=null && castOther.getAlias()!=null && this.getAlias().equals(castOther.getAlias()) )); } /** * hashCode */ public int hashCode() { int result = 17; result = 37 * result + ( getAlias() == null ? 0 : this.getAlias().hashCode() ); return result; } }
Polishing imports.
helianto-core/src/main/java/org/helianto/core/domain/Entity.java
Polishing imports.
Java
apache-2.0
40de164a607bc53ca23930c5199ce5fe0dd1ba15
0
pluto-build/pluto
package build.pluto.builder; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import org.sugarj.common.StringCommands; import build.pluto.BuildUnit; import build.pluto.BuildUnit.ModuleVisitor; import build.pluto.dependency.BuildRequirement; import build.pluto.dependency.DuplicateBuildUnitPathException; import build.pluto.dependency.DuplicateFileGenerationException; import build.pluto.dependency.FileRequirement; import build.pluto.dependency.IllegalDependencyException; import build.pluto.dependency.Requirement; import build.pluto.dependency.database.MultiMapDatabase; import build.pluto.output.Output; import build.pluto.util.AbsoluteComparedFile; import build.pluto.util.IReporting; import com.cedarsoftware.util.DeepEquals; import com.cedarsoftware.util.Traverser; public class DynamicAnalysis { private final MultiMapDatabase<File, File> genBy; private final IReporting report; private Map<File, BuildUnit<?>> generatedFiles; private Map<Output, BuildUnit<?>> generatedOutput; public DynamicAnalysis(IReporting report, MultiMapDatabase<File, File> genBy) { this.genBy = genBy; this.report = report; this.generatedFiles = new HashMap<>(); this.generatedOutput = new HashMap<>(); } public void resetAnalysis() throws IOException { genBy.clear(); } public Collection<File> getGenBy(File generated) throws IOException { return genBy.get(generated); } public void reset(BuildUnit<?> unit) throws IOException { if (unit != null) { Set<File> files = unit.getGeneratedFiles(); genBy.removeForEach(files, unit.getPersistentPath()); for (File f : files) { generatedFiles.remove(f); } } } public void check(BuildUnit<?> unit, Integer inputHash) throws IOException { checkInput(unit, inputHash); checkGeneratedFilesOverlap(unit); checkUnitDependency(unit); checkGeneratedOutputs(unit); } /** * The input may not have been 
changed during the build. */ private void checkInput(BuildUnit<?> unit, Integer inputHash) throws AssertionError { if (inputHash != null && inputHash != DeepEquals.deepHashCode(unit.getGeneratedBy().input)) throw new AssertionError("API Violation detected: Builder mutated its input."); } /** * The build unit must have a unique persistent path and may not * generated files previously generated by another build unit. */ private void checkGeneratedFilesOverlap(BuildUnit<?> unit) throws IOException { BuildUnit<?> other = generatedFiles.put(unit.getPersistentPath(), unit); if (other != null && other != unit) throw new DuplicateBuildUnitPathException("Build unit " + unit + " has same persistent path as build unit " + other); Set<File> files = unit.getGeneratedFiles(); genBy.addForEach(files, unit.getPersistentPath()); for (File file : files) { other = generatedFiles.put(file, unit); if (other != null && other != unit) { BuildRequest<?, ?, ?, ?> unitReq = unit.getGeneratedBy(); BuildRequest<?, ?, ?, ?> otherReq = other.getGeneratedBy(); boolean overlapOK = unitReq.factory.isOverlappingGeneratedFileCompatible( file, unitReq.input, otherReq.factory, otherReq.input); if (!overlapOK) throw new DuplicateFileGenerationException("Build unit " + unit + " generates same file as build unit " + other); } } } /** * When a build unit A requires a file that was generated by a build unit B, * then build unit A must already have a build requirement on build unit B. 
*/ private void checkUnitDependency(BuildUnit<?> unit) { Set<BuildUnit<?>> requiredUnits = new HashSet<>(); for (Requirement req : unit.getRequirements()) { if (req instanceof BuildRequirement<?>) requiredUnits.add(((BuildRequirement<?>) req).getUnit()); else if (req instanceof FileRequirement) { File file = ((FileRequirement) req).file; if (file.exists()) { Collection<File> deps = null; try { deps = genBy.get(file); } catch (IOException e) { report.messageFromSystem("WARNING: Could not verify build-unit dependency due to exception \"" + e.getMessage() + "\" while reading metadata: " + file, true, 0); } if (deps != null && !deps.isEmpty()) { boolean foundDep = false; for (File dep : deps) if (AbsoluteComparedFile.equals(unit.getPersistentPath(), dep)) { foundDep = true; break; } if (!foundDep && deps.size() == 1) foundDep = unit.visit(new IsConnectedTo(deps.iterator().next()), requiredUnits); else if (!foundDep) foundDep = unit.visit(new IsConnectedToAny(deps), requiredUnits); if (!foundDep && deps.size() == 1) throw new IllegalDependencyException(deps, "Build unit " + unit.getPersistentPath() + " has a hidden dependency on file " + file + " without build-unit dependency on " + deps.iterator().next() + ", which generated this file. " + "The builder " + unit.getGeneratedBy().createBuilder().description() + " should " + "mark a dependency to " + deps.iterator().next() + " by `requiring` the corresponding builder."); else if (!foundDep) throw new IllegalDependencyException(deps, "Build unit " + unit.getPersistentPath() + " has a hidden dependency on file " + file + " without build-unit dependency on at least one of [" + StringCommands.printListSeparated(deps, ", ") + "], all " + "of which generated this file. 
" + "The builder " + unit.getGeneratedBy().createBuilder().description() + " should " + "mark a dependency to one of [" + StringCommands.printListSeparated(deps, ", ") + "] by `requiring` the corresponding" + " builder."); } } } } } /** * A builder must declare build requirements on all builders whose * outputs it uses (including outputs provided via the build input). */ private void checkGeneratedOutputs(final BuildUnit<?> unit) { if (unit.getBuildResult() != null) generatedOutput.put(unit.getBuildResult(), unit); Traverser.traverse(unit.getGeneratedBy().input, new Traverser.Visitor() { @Override public void process(Object o) { if (o instanceof Output) { BuildUnit<?> generator = generatedOutput.get(o); if (generator != null) { File dep = generator.getPersistentPath(); boolean foundDep = AbsoluteComparedFile.equals(unit.getPersistentPath(), dep) || unit.visit(new IsConnectedTo(dep)); if (!foundDep) throw new IllegalDependencyException(Collections.singleton(dep), "Build unit " + dep + " has a hidden dependency on the " + "in-memory output of build unit " + generator + ". 
" + "The builder " + unit.getGeneratedBy().createBuilder().description() + " should " + "mark a dependency to " + dep + " by `requiring` the corresponding builder."); } } } }); } private static class IsConnectedTo implements ModuleVisitor<Boolean> { private final File requiredUnit; public IsConnectedTo(File requiredUnit) { this.requiredUnit = Objects.requireNonNull(requiredUnit); } @Override public Boolean visit(BuildUnit<?> mod) { return AbsoluteComparedFile.equals(requiredUnit, mod.getPersistentPath()); } @Override public Boolean combine(Boolean t1, Boolean t2) { return t1 || t2; } @Override public Boolean init() { return false; } @Override public boolean cancel(Boolean t) { return t; } } private static class IsConnectedToAny implements ModuleVisitor<Boolean> { private final Set<File> requireAtLeastOne; public IsConnectedToAny(Collection<File> requireAtLeastOne) { Objects.requireNonNull(requireAtLeastOne); this.requireAtLeastOne = new HashSet<File>(); for (File f : requireAtLeastOne) this.requireAtLeastOne.add(f.getAbsoluteFile()); } @Override public Boolean visit(BuildUnit<?> mod) { return requireAtLeastOne.contains(mod.getPersistentPath().getAbsoluteFile()); } @Override public Boolean combine(Boolean t1, Boolean t2) { return t1 || t2; } @Override public Boolean init() { return false; } @Override public boolean cancel(Boolean t) { return t; } } }
src/build/pluto/builder/DynamicAnalysis.java
package build.pluto.builder; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import org.sugarj.common.StringCommands; import build.pluto.BuildUnit; import build.pluto.BuildUnit.ModuleVisitor; import build.pluto.dependency.BuildRequirement; import build.pluto.dependency.DuplicateBuildUnitPathException; import build.pluto.dependency.DuplicateFileGenerationException; import build.pluto.dependency.FileRequirement; import build.pluto.dependency.IllegalDependencyException; import build.pluto.dependency.Requirement; import build.pluto.dependency.database.MultiMapDatabase; import build.pluto.output.Output; import build.pluto.util.AbsoluteComparedFile; import build.pluto.util.IReporting; import com.cedarsoftware.util.DeepEquals; import com.cedarsoftware.util.Traverser; public class DynamicAnalysis { private final MultiMapDatabase<File, File> genBy; private final IReporting report; private Map<File, BuildUnit<?>> generatedFiles; private Map<Output, BuildUnit<?>> generatedOutput; public DynamicAnalysis(IReporting report, MultiMapDatabase<File, File> genBy) { this.genBy = genBy; this.report = report; this.generatedFiles = new HashMap<>(); this.generatedOutput = new HashMap<>(); } public void resetAnalysis() throws IOException { genBy.clear(); } public Collection<File> getGenBy(File generated) throws IOException { return genBy.get(generated); } public void reset(BuildUnit<?> unit) throws IOException { if (unit != null) { Set<File> files = unit.getGeneratedFiles(); genBy.removeForEach(files, unit.getPersistentPath()); for (File f : files) { generatedFiles.remove(f); } } } public void check(BuildUnit<?> unit, Integer inputHash) throws IOException { checkInput(unit, inputHash); checkGeneratedFilesOverlap(unit); checkUnitDependency(unit); checkGeneratedOutputs(unit); } /** * The input may not have been 
changed during the build. */ private void checkInput(BuildUnit<?> unit, Integer inputHash) throws AssertionError { if (inputHash != null && inputHash != DeepEquals.deepHashCode(unit.getGeneratedBy().input)) throw new AssertionError("API Violation detected: Builder mutated its input."); } /** * The build unit must have a unique persistent path and may not * generated files previously generated by another build unit. */ private void checkGeneratedFilesOverlap(BuildUnit<?> unit) throws IOException { BuildUnit<?> other = generatedFiles.put(unit.getPersistentPath(), unit); if (other != null && other != unit) throw new DuplicateBuildUnitPathException("Build unit " + unit + " has same persistent path as build unit " + other); Set<File> files = unit.getGeneratedFiles(); genBy.addForEach(files, unit.getPersistentPath()); for (File file : files) { other = generatedFiles.put(file, unit); if (other != null && other != unit) { BuildRequest<?, ?, ?, ?> unitReq = unit.getGeneratedBy(); BuildRequest<?, ?, ?, ?> otherReq = other.getGeneratedBy(); boolean overlapOK = unitReq.factory.isOverlappingGeneratedFileCompatible( file, unitReq.input, otherReq.factory, otherReq.input); if (!overlapOK) throw new DuplicateFileGenerationException("Build unit " + unit + " generates same file as build unit " + other); } } } /** * When a build unit A requires a file that was generated by a build unit B, * then build unit A must already have a build requirement on build unit B. 
*/ private void checkUnitDependency(BuildUnit<?> unit) { Set<BuildUnit<?>> requiredUnits = new HashSet<>(); for (Requirement req : unit.getRequirements()) { if (req instanceof BuildRequirement<?>) requiredUnits.add(((BuildRequirement<?>) req).getUnit()); else if (req instanceof FileRequirement) { File file = ((FileRequirement) req).file; if (file.exists()) { Collection<File> deps = null; try { deps = genBy.get(file); } catch (IOException e) { report.messageFromSystem("WARNING: Could not verify build-unit dependency due to exception \"" + e.getMessage() + "\" while reading metadata: " + file, true, 0); } if (deps != null) { boolean foundDep = false; for (File dep : deps) if (AbsoluteComparedFile.equals(unit.getPersistentPath(), dep)) { foundDep = true; break; } if (!foundDep && deps.size() == 1) foundDep = unit.visit(new IsConnectedTo(deps.iterator().next()), requiredUnits); else if (!foundDep) foundDep = unit.visit(new IsConnectedToAny(deps), requiredUnits); if (!foundDep && deps.size() == 1) throw new IllegalDependencyException(deps, "Build unit " + unit.getPersistentPath() + " has a hidden dependency on file " + file + " without build-unit dependency on " + deps.iterator().next() + ", which generated this file. " + "The builder " + unit.getGeneratedBy().createBuilder().description() + " should " + "mark a dependency to " + deps.iterator().next() + " by `requiring` the corresponding builder."); else if (!foundDep) throw new IllegalDependencyException(deps, "Build unit " + unit.getPersistentPath() + " has a hidden dependency on file " + file + " without build-unit dependency on at least one of [" + StringCommands.printListSeparated(deps, ", ") + "], all " + "of which generated this file. 
" + "The builder " + unit.getGeneratedBy().createBuilder().description() + " should " + "mark a dependency to one of [" + StringCommands.printListSeparated(deps, ", ") + "] by `requiring` the corresponding" + " builder."); } } } } } /** * A builder must declare build requirements on all builders whose * outputs it uses (including outputs provided via the build input). */ private void checkGeneratedOutputs(final BuildUnit<?> unit) { if (unit.getBuildResult() != null) generatedOutput.put(unit.getBuildResult(), unit); Traverser.traverse(unit.getGeneratedBy().input, new Traverser.Visitor() { @Override public void process(Object o) { if (o instanceof Output) { BuildUnit<?> generator = generatedOutput.get(o); if (generator != null) { File dep = generator.getPersistentPath(); boolean foundDep = AbsoluteComparedFile.equals(unit.getPersistentPath(), dep) || unit.visit(new IsConnectedTo(dep)); if (!foundDep) throw new IllegalDependencyException(Collections.singleton(dep), "Build unit " + dep + " has a hidden dependency on the " + "in-memory output of build unit " + generator + ". 
" + "The builder " + unit.getGeneratedBy().createBuilder().description() + " should " + "mark a dependency to " + dep + " by `requiring` the corresponding builder."); } } } }); } private static class IsConnectedTo implements ModuleVisitor<Boolean> { private final File requiredUnit; public IsConnectedTo(File requiredUnit) { this.requiredUnit = Objects.requireNonNull(requiredUnit); } @Override public Boolean visit(BuildUnit<?> mod) { return AbsoluteComparedFile.equals(requiredUnit, mod.getPersistentPath()); } @Override public Boolean combine(Boolean t1, Boolean t2) { return t1 || t2; } @Override public Boolean init() { return false; } @Override public boolean cancel(Boolean t) { return t; } } private static class IsConnectedToAny implements ModuleVisitor<Boolean> { private final Set<File> requireAtLeastOne; public IsConnectedToAny(Collection<File> requireAtLeastOne) { Objects.requireNonNull(requireAtLeastOne); this.requireAtLeastOne = new HashSet<File>(); for (File f : requireAtLeastOne) this.requireAtLeastOne.add(f.getAbsoluteFile()); } @Override public Boolean visit(BuildUnit<?> mod) { return requireAtLeastOne.contains(mod.getPersistentPath().getAbsoluteFile()); } @Override public Boolean combine(Boolean t1, Boolean t2) { return t1 || t2; } @Override public Boolean init() { return false; } @Override public boolean cancel(Boolean t) { return t; } } }
bugfix: check for empty deps
src/build/pluto/builder/DynamicAnalysis.java
bugfix: check for empty deps
Java
apache-2.0
2e9416e2fad894ee034f5bca95c1e1171d7452b7
0
danilkolikov/cms
package fractal; import de.erichseifert.gral.data.DataTable; import de.erichseifert.gral.plots.XYPlot; import de.erichseifert.gral.plots.lines.DefaultLineRenderer2D; import de.erichseifert.gral.plots.lines.LineRenderer; import de.erichseifert.gral.plots.points.PointRenderer; import de.erichseifert.gral.ui.InteractivePanel; import org.apache.commons.math3.complex.Complex; import javax.swing.*; import java.awt.*; import java.awt.geom.Ellipse2D; /** * Main frame for fractal task * * @author Danil Kolikov */ public class MainFrame extends JFrame { private static final Color red = new Color(1.0f, 0.0f, 0.0f); private static final Color green = new Color(0.0f, 1.0f, 0.0f); private static final Color blue = new Color(0.0f, 0.0f, 1.0f); private DataTable firstRootData = new DataTable(Double.class, Double.class); private DataTable secondRootData = new DataTable(Double.class, Double.class); private DataTable thirdRootData = new DataTable(Double.class, Double.class); private XYPlot plot = new XYPlot(); private InteractivePanel interactivePanel; LineRenderer lineRenderer = new DefaultLineRenderer2D(); private void drawCircle() { DataTable circleData = new DataTable(Double.class, Double.class); for (int i = 0; i <= 720; i++) { double x = Math.cos(i * Math.PI / 360); double y = Math.sin(i * Math.PI / 360); circleData.add(x, y); } plot.add(circleData); plot.setLineRenderers(circleData, lineRenderer); for (PointRenderer pR : plot.getPointRenderers(circleData)) { Shape circle = new Ellipse2D.Double(-4.0, -4.0, 8.0, 8.0); pR.setShape(circle); } } private void drawPoint(Complex point, int pointColor) { switch (pointColor) { case 0: firstRootData.add(point.getReal(), point.getImaginary()); for (PointRenderer pR : plot.getPointRenderers(firstRootData)) { Shape circle = new Ellipse2D.Double(-4.0, -4.0, 8.0, 8.0); pR.setShape(circle); pR.setColor(red); } break; case 1: secondRootData.add(point.getReal(), point.getImaginary()); for (PointRenderer pR : 
plot.getPointRenderers(secondRootData)) { Shape circle = new Ellipse2D.Double(-4.0, -4.0, 8.0, 8.0); pR.setShape(circle); pR.setColor(green); } break; case 2: thirdRootData.add(point.getReal(), point.getImaginary()); for (PointRenderer pR : plot.getPointRenderers(thirdRootData)) { Shape circle = new Ellipse2D.Double(-4.0, -4.0, 8.0, 8.0); pR.setShape(circle); pR.setColor(blue); } break; } } public MainFrame() throws HeadlessException { setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); setMinimumSize(new Dimension(600, 600)); drawCircle(); plot.add(firstRootData); plot.add(secondRootData); plot.add(thirdRootData); interactivePanel = new InteractivePanel(plot); getContentPane().add(interactivePanel); } public static void main(String[] args) { MainFrame mainFrame = new MainFrame(); mainFrame.setTitle("Fractals"); mainFrame.drawPoint(new Complex(0.0f, 0.0f), 0); mainFrame.drawPoint(new Complex(0.5f, 0.5f), 1); mainFrame.drawPoint(new Complex(1.5f, 1.5f), 2); mainFrame.setVisible(true); } }
src/main/java/fractal/MainFrame.java
package fractal; import de.erichseifert.gral.data.DataTable; import de.erichseifert.gral.plots.XYPlot; import de.erichseifert.gral.plots.lines.DefaultLineRenderer2D; import de.erichseifert.gral.plots.lines.LineRenderer; import de.erichseifert.gral.plots.points.PointRenderer; import de.erichseifert.gral.ui.InteractivePanel; import javax.swing.*; import java.awt.*; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; /** * Main frame for fractal task * * @author Danil Kolikov */ public class MainFrame extends JFrame { public MainFrame() throws HeadlessException { setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); setMinimumSize(new Dimension(600, 400)); DataTable data = new DataTable(Double.class, Double.class); XYPlot plot = new XYPlot(); LineRenderer lines = new DefaultLineRenderer2D(); // count data to cos(x) for (double x = -5.0; x <= 5.0; x += 0.25) { double y = Math.cos(x); data.add(x, y); } plot.add(data); // set lines plot.setLineRenderers(data, lines); // set colors Color color = new Color(0.0f, 0.0f, 1.0f); Color red = new Color(1.0f, 0.0f, 0.0f); for (PointRenderer pR : plot.getPointRenderers(data)) { pR.setColor(red); } for (LineRenderer lR : plot.getLineRenderers(data)) { lR.setColor(color); } InteractivePanel interactivePanel = new InteractivePanel(plot); getContentPane().add(interactivePanel); } public static void main(String[] args) { MainFrame mainFrame = new MainFrame(); mainFrame.setTitle("Fractals"); mainFrame.setVisible(true); } }
drawCircle
src/main/java/fractal/MainFrame.java
drawCircle
Java
apache-2.0
62d949c61a0cc814f814a796864da001c0abac59
0
MyersResearchGroup/iBioSim,MyersResearchGroup/iBioSim,MyersResearchGroup/iBioSim
package dynamicsim; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import gnu.trove.iterator.TIntIterator; import gnu.trove.map.hash.TIntDoubleHashMap; import gnu.trove.map.hash.TObjectDoubleHashMap; import gnu.trove.map.hash.TObjectIntHashMap; import gnu.trove.set.hash.TIntHashSet; import javax.swing.JOptionPane; import javax.xml.stream.XMLStreamException; import main.Gui; import org.openmali.FastMath; import org.openmali.FastMath.FRExpResultf; import org.sbml.jsbml.ListOf; import org.sbml.jsbml.LocalParameter; import org.sbml.jsbml.Model; import org.sbml.jsbml.ASTNode; import org.sbml.jsbml.KineticLaw; import org.sbml.jsbml.Reaction; import org.sbml.jsbml.SBMLDocument; import org.sbml.jsbml.SBMLErrorLog; import org.sbml.jsbml.SBMLReader; import org.sbml.jsbml.SpeciesReference; public class DynamicGillespie { //SBML model private Model model = null; //generates random numbers based on the xorshift method XORShiftRandom randomNumberGenerator = null; private HashMap<String, Reaction> reactionToSBMLReactionMap = null; //allows for access to a propensity from a reaction ID private TObjectDoubleHashMap<String> reactionToPropensityMap = null; //allows for access to reactant/product speciesID and stoichiometry from a reaction ID //note that species and stoichiometries need to be thought of as unique for each reaction private HashMap<String, HashSet<StringDoublePair> > reactionToSpeciesAndStoichiometrySetMap = null; //allows for access to reactant/modifier speciesID and stoichiometry from a reaction ID private HashMap<String, HashSet<StringDoublePair> > reactionToReactantStoichiometrySetMap = null; //allows for access to a kinetic formula tree from a reaction private HashMap<String, ASTNode> reactionToFormulaMap = null; //allows for access to a group number from a reaction ID private TObjectIntHashMap<String> reactionToGroupMap = null; //allows for access to a set of 
reactions that a species is in (as a reactant or modifier) from a species ID private HashMap<String, HashSet<String> > speciesToAffectedReactionSetMap = null; //allows for access to species and parameter values from a variable ID private TObjectDoubleHashMap<String> variableToValueMap = null; //allows for access to a group's min/max propensity from a group ID private TIntDoubleHashMap groupToMaxValueMap = new TIntDoubleHashMap(50); //allows for access to the minimum/maximum possible propensity in the group from a group ID private TIntDoubleHashMap groupToPropensityFloorMap = new TIntDoubleHashMap(50); private TIntDoubleHashMap groupToPropensityCeilingMap = new TIntDoubleHashMap(50); //allows for access to the reactionIDs in a group from a group ID private ArrayList<HashSet<String> > groupToReactionSetList = new ArrayList<HashSet<String> >(50); //stores group numbers that are nonempty private TIntHashSet nonemptyGroupSet = new TIntHashSet(50); //number of groups including the empty groups and zero-propensity group private int numGroups = 0; //propensity variables double totalPropensity = 0.0; double minPropensity = Double.MAX_VALUE; double maxPropensity = Double.MIN_VALUE; public DynamicGillespie() { } /** * simulates the sbml model * * @param SBMLFileName * @param outputDirectory * @param timeLimit * @param maxTimeStep * @param randomSeed */ public void Simulate(String SBMLFileName, String outputDirectory, double timeLimit, double maxTimeStep, long randomSeed) { long timeBeforeSim = System.nanoTime(); //initialization will fail if the SBML model has errors try { if (!Initialize(SBMLFileName, outputDirectory, timeLimit, maxTimeStep, randomSeed)) return; } catch (FileNotFoundException e) { e.printStackTrace(); return; } catch (XMLStreamException e) { e.printStackTrace(); return; } System.err.println("initialization time: " + (System.nanoTime() - timeBeforeSim)/1e9f); //SIMULATION LOOP //simulate until the time limit is reached long step1Time = 0; long step2Time = 0; 
long step3aTime = 0; long step3bTime = 0; long step4Time = 0; long step5Time = 0; long step6Time = 0; double currentTime = 0.0; while (currentTime <= timeLimit) { //STEP 1: generate random numbers long step1Initial = System.nanoTime(); double r1 = randomNumberGenerator.nextDouble(); double r2 = randomNumberGenerator.nextDouble(); double r3 = randomNumberGenerator.nextDouble(); double r4 = randomNumberGenerator.nextDouble(); step1Time += System.nanoTime() - step1Initial; //STEP 2: calculate delta_t, the time till the next reaction execution long step2Initial = System.nanoTime(); double delta_t = Math.log(1 / r1) / totalPropensity; step2Time += System.nanoTime() - step2Initial; //System.err.println(totalPropensity + " " + currentTime + " " + delta_t + " "); //System.out.println("step 2: time is " + currentTime); System.err.println(numGroups); //STEP 3A: select a group long step3aInitial = System.nanoTime(); //pick a random index, loop through the nonempty groups until that index is reached int randomIndex = (int) Math.floor(r2 * (nonemptyGroupSet.size() - 0.0000001)); int indexIter = 0; TIntIterator nonemptyGroupSetIterator = nonemptyGroupSet.iterator(); while (nonemptyGroupSetIterator.hasNext() && (indexIter < randomIndex)) { //System.out.println("step 3a"); nonemptyGroupSetIterator.next(); ++indexIter; } int selectedGroup = nonemptyGroupSetIterator.next(); //System.err.println(" index: " + randomIndex + " group: " + selectedGroup); step3aTime += System.nanoTime() - step3aInitial; //STEP 3B: select a reaction within the group long step3bInitial = System.nanoTime(); HashSet<String> reactionSet = groupToReactionSetList.get(selectedGroup); randomIndex = (int) Math.floor(r3 * reactionSet.size()); indexIter = 0; Iterator<String> reactionSetIterator = reactionSet.iterator(); while (reactionSetIterator.hasNext() && indexIter < randomIndex) { reactionSetIterator.next(); ++indexIter; } String selectedReactionID = reactionSetIterator.next(); double reactionPropensity = 
reactionToPropensityMap.get(selectedReactionID); //this is choosing a value between 0 and the max propensity in the group double randomPropensity = r4 * groupToMaxValueMap.get(selectedGroup); //loop until there's no reaction rejection //if the random propensity is higher than the selected reaction's propensity, another random reaction is chosen while (randomPropensity > reactionPropensity) { //System.out.println("step 3b"); //System.out.println(randomPropensity + " " + reactionPropensity); r4 = randomNumberGenerator.nextDouble(); randomIndex = (int) Math.floor(r4 * reactionSet.size()); indexIter = 0; reactionSetIterator = reactionSet.iterator(); while (reactionSetIterator.hasNext() && (indexIter < randomIndex)) { reactionSetIterator.next(); ++indexIter; } selectedReactionID = reactionSetIterator.next(); reactionPropensity = reactionToPropensityMap.get(selectedReactionID); randomPropensity = r4 * groupToMaxValueMap.get(selectedGroup); } step3bTime += System.nanoTime() - step3bInitial; //System.err.println("\nreaction fired: " + selectedReactionID + " propensity: " + reactionPropensity); //STEP 4: perform selected reaction and update species counts long step4Initial = System.nanoTime(); //set of all affected reactions that need propensity updating HashSet<String> totalAffectedReactionSet = new HashSet<String>(20); //loop through the reaction's reactants and products and update their amounts for (StringDoublePair speciesAndStoichiometry : reactionToSpeciesAndStoichiometrySetMap.get(selectedReactionID)) { //System.out.println("step 4"); double stoichiometry = speciesAndStoichiometry.doub; String speciesID = speciesAndStoichiometry.string; //System.out.println(selectedReactionID + " " + speciesID + " " + variableToValueMap.get(speciesID) + " " + stoichiometry); //update the species count variableToValueMap.adjustValue(speciesID, stoichiometry * delta_t); //System.out.println(" " + speciesID + " " + variableToValueMap.get(speciesID)); 
totalAffectedReactionSet.addAll(speciesToAffectedReactionSetMap.get(speciesID)); } // for (String reaction : reactionToFormulaMap.keySet()) // System.out.println("reactionToFormula Key: " + reaction); // for (String species : speciesToAffectedReactionSetMap.keySet()) // for (String reaction : speciesToAffectedReactionSetMap.get(species)) // System.out.println(species + " " + reaction); step4Time += System.nanoTime() - step4Initial; //STEP 5: compute affected reactions' new propensities and update total propensity //loop through the affected reactions and update the propensities for (String affectedReactionID : totalAffectedReactionSet) { //System.out.println("step 5"); //System.err.println(affectedReactionID + " " + model.getReaction(affectedReactionID.replace("_fd","").replace("_rv","")).getKineticLaw().getFormula()); //System.err.println(reactionToPropensityMap.get(affectedReactionID) + " " + totalAffectedReactionSet.size()); long step5Initial = System.nanoTime(); boolean notEnoughMoleculesFlag = false; HashSet<StringDoublePair> reactantStoichiometrySet = reactionToReactantStoichiometrySetMap.get(affectedReactionID); //check for enough molecules for the reaction to occur for (StringDoublePair speciesAndStoichiometry : reactantStoichiometrySet) { String speciesID = speciesAndStoichiometry.string; double stoichiometry = speciesAndStoichiometry.doub; //this means there aren't enough molecules to satisfy the stoichiometry if (variableToValueMap.get(speciesID) < stoichiometry) { notEnoughMoleculesFlag = true; break; } } double newPropensity = 0.0; if (notEnoughMoleculesFlag == true) newPropensity = 0.0; else newPropensity = CalculatePropensity(reactionToFormulaMap.get(affectedReactionID)); double oldPropensity = reactionToPropensityMap.get(affectedReactionID); //add the difference of new v. 
old propensity to the total propensity totalPropensity += newPropensity - oldPropensity; //System.err.println(String.valueOf(totalPropensity) + " " + String.valueOf(newPropensity - oldPropensity)); //System.err.println(affectedReactionID + ": " + oldPropensity + " -> " + newPropensity); reactionToPropensityMap.put(affectedReactionID, newPropensity); step5Time += System.nanoTime() - step5Initial; //STEP 6: re-assign affected reactions to appropriate groups long step6Initial = System.nanoTime(); int oldGroup = reactionToGroupMap.get(affectedReactionID); if (newPropensity == 0.0) { HashSet<String> oldReactionSet = groupToReactionSetList.get(oldGroup); //update group collections //zero propensities go into group 0 oldReactionSet.remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, 0); groupToReactionSetList.get(0).add(affectedReactionID); if (oldReactionSet.size() == 0) nonemptyGroupSet.remove(oldGroup); } else if (oldPropensity == 0.0) { int group; if (newPropensity <= minPropensity) { group = 1; minPropensity = newPropensity; ReassignAllReactionsToGroups(); } else { if (newPropensity > maxPropensity) maxPropensity = newPropensity; FRExpResultf frexpResult = FastMath.frexp((float) (newPropensity / minPropensity)); group = frexpResult.exponent; } if (group < numGroups) { HashSet<String> groupReactionSet = groupToReactionSetList.get(group); //update group collections groupToReactionSetList.get(0).remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); groupReactionSet.add(affectedReactionID); //if the group that the reaction was just added to is now nonempty if (groupReactionSet.size() == 1) nonemptyGroupSet.add(group); if (newPropensity > groupToMaxValueMap.get(group)) groupToMaxValueMap.put(group, newPropensity); } //this means the propensity goes into a group that doesn't currently exist else { //groupToReactionSetList is a list, so the group needs to be the index for (int iter = numGroups; iter <= group; ++iter) 
groupToReactionSetList.add(new HashSet<String>(500)); numGroups = group + 1; //update group collections groupToReactionSetList.get(0).remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); groupToReactionSetList.get(group).add(affectedReactionID); nonemptyGroupSet.add(group); groupToMaxValueMap.put(group, newPropensity); } } else { if (newPropensity > groupToPropensityCeilingMap.get(oldGroup) || newPropensity < groupToPropensityFloorMap.get(oldGroup)) { int group; if (newPropensity <= minPropensity) { group = 1; minPropensity = newPropensity; ReassignAllReactionsToGroups(); } else { if (newPropensity > maxPropensity) maxPropensity = newPropensity; FRExpResultf frexpResult = FastMath.frexp((float) (newPropensity / minPropensity)); group = frexpResult.exponent; } if (group < numGroups) { HashSet<String> newGroupReactionSet = groupToReactionSetList.get(group); HashSet<String> oldGroupReactionSet = groupToReactionSetList.get(oldGroup); //update group collections oldGroupReactionSet.remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); newGroupReactionSet.add(affectedReactionID); //if the group that the reaction was just added to is now nonempty if (newGroupReactionSet.size() == 1) nonemptyGroupSet.add(group); if (oldGroupReactionSet.size() == 0) nonemptyGroupSet.remove(oldGroup); if (newPropensity > groupToMaxValueMap.get(group)) groupToMaxValueMap.put(group, newPropensity); } //this means the propensity goes into a group that doesn't currently exist else { //groupToReactionSetList is a list, so the group needs to be the index for (int iter = numGroups; iter <= group; ++iter) groupToReactionSetList.add(new HashSet<String>(500)); numGroups = group + 1; HashSet<String> oldReactionSet = groupToReactionSetList.get(oldGroup); //update group collections groupToReactionSetList.get(oldGroup).remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); groupToReactionSetList.get(group).add(affectedReactionID); 
nonemptyGroupSet.add(group); groupToMaxValueMap.put(group, newPropensity); if (oldReactionSet.size() == 0) nonemptyGroupSet.remove(oldGroup); } } else { //maintain current group; do nothing } } step6Time += System.nanoTime() - step6Initial; }//end step 5/6 for loop //update time: choose the smaller of delta_t and the given max timestep //by default, delta_t will always be chosen if (delta_t <= maxTimeStep) currentTime += delta_t; else currentTime += maxTimeStep; } //end simulation loop System.err.println("total time: " + String.valueOf((System.nanoTime() - timeBeforeSim) / 1e9f)); System.err.println("total step 1 time: " + String.valueOf(step1Time / 1e9f)); System.err.println("total step 2 time: " + String.valueOf(step2Time / 1e9f)); System.err.println("total step 3a time: " + String.valueOf(step3aTime / 1e9f)); System.err.println("total step 3b time: " + String.valueOf(step3bTime / 1e9f)); System.err.println("total step 4 time: " + String.valueOf(step4Time / 1e9f)); System.err.println("total step 5 time: " + String.valueOf(step5Time / 1e9f)); System.err.println("total step 6 time: " + String.valueOf(step6Time / 1e9f)); } /** * loads the model and initializes the maps and variables and whatnot * @throws XMLStreamException * @throws FileNotFoundException */ private boolean Initialize(String SBMLFileName, String outputDirectory, double timeLimit, double maxTimeStep, long randomSeed) throws FileNotFoundException, XMLStreamException { randomNumberGenerator = new XORShiftRandom(randomSeed); SBMLReader reader = new SBMLReader(); SBMLDocument document = reader.readSBML(SBMLFileName); SBMLErrorLog errors = document.getListOfErrors(); //if the sbml document has errors, tell the user and don't simulate if (document.getNumErrors() > 0) { String errorString = ""; for (int i = 0; i < errors.getNumErrors(); i++) { errorString += errors.getError(i); } JOptionPane.showMessageDialog(Gui.frame, "The SBML file contains " + document.getNumErrors() + " error(s):\n" + errorString, "SBML 
Error", JOptionPane.ERROR_MESSAGE); return false; } model = document.getModel(); long numSpecies = model.getNumSpecies(); long numParameters = model.getNumParameters(); long numReactions = model.getNumReactions(); //set initial capacities for collections (1.5 is used to multiply numReactions due to reversible reactions) speciesToAffectedReactionSetMap = new HashMap<String, HashSet<String> >((int) numSpecies); variableToValueMap = new TObjectDoubleHashMap<String>((int) numSpecies + (int) numParameters); reactionToPropensityMap = new TObjectDoubleHashMap<String>((int) (numReactions * 1.5)); reactionToSpeciesAndStoichiometrySetMap = new HashMap<String, HashSet<StringDoublePair> >((int) (numReactions * 1.5)); reactionToReactantStoichiometrySetMap = new HashMap<String, HashSet<StringDoublePair> >((int) (numReactions * 1.5)); reactionToFormulaMap = new HashMap<String, ASTNode>((int) (numReactions * 1.5)); reactionToGroupMap = new TObjectIntHashMap<String>((int) (numReactions * 1.5)); reactionToSBMLReactionMap = new HashMap<String, Reaction>((int) numReactions); //add values to hashmap for easy access to species amounts for (int i = 0; i < numSpecies; ++i) { variableToValueMap.put(model.getSpecies(i).getId(), model.getSpecies(i).getInitialAmount()); speciesToAffectedReactionSetMap.put(model.getSpecies(i).getId(), new HashSet<String>(20)); } //add values to hashmap for easy access to global parameter values //NOTE: the IDs for the parameters and species must be unique, so putting them in the //same hashmap is okay for (int i = 0; i < numParameters; ++i) { variableToValueMap.put(model.getParameter(i).getId(), model.getParameter(i).getValue()); } //STEP 0A: calculate initial propensities (including the total) CalculateInitialPropensities(numReactions); //STEP OB: create and populate initial groups CreateAndPopulateInitialGroups(); return true; } /** * calculates the initial propensities for each reaction in the model * * @param numReactions the number of reactions in the 
model */ private void CalculateInitialPropensities(long numReactions) { //loop through all reactions and calculate their propensities for (int i = 0; i < numReactions; ++i) { Reaction reaction = model.getReaction(i); String reactionID = reaction.getId(); KineticLaw reactionKineticLaw = reaction.getKineticLaw(); ASTNode reactionFormula = reactionKineticLaw.getMath(); ListOf<LocalParameter> reactionParameters = reactionKineticLaw.getListOfLocalParameters(); boolean notEnoughMoleculesFlagFd = false; boolean notEnoughMoleculesFlagRv = false; boolean notEnoughMoleculesFlag = false; reactionToSBMLReactionMap.put(reactionID, reaction); //put the local parameters into a hashmap for easy access //NOTE: these may overwrite some global parameters but that's fine, //because for each reaction the local parameters are the ones we want //and they're always defined for (int j = 0; j < reactionParameters.size(); ++j) { variableToValueMap.put(reactionParameters.get(j).getId(), reactionParameters.get(j).getValue()); } //if it's a reversible reaction //split into a forward and reverse reaction (based on the minus sign in the middle) //and calculate both propensities if (reaction.getReversible()) { //associate the reaction's reactants/products and their stoichiometries with the reaction ID //this is a reversible reaction, so the stoichiometries are switched for the reverse reaction HashSet<StringDoublePair> speciesAndStoichiometrySetFd = new HashSet<StringDoublePair>(); HashSet<StringDoublePair> speciesAndStoichiometrySetRv = new HashSet<StringDoublePair>(); HashSet<StringDoublePair> reactantStoichiometrySetFd = new HashSet<StringDoublePair>(); HashSet<StringDoublePair> reactantStoichiometrySetRv = new HashSet<StringDoublePair>(); for (int a = 0; a < reaction.getNumReactants(); ++a) { SpeciesReference reactant = reaction.getReactant(a); String reactantID = reactant.getSpecies(); double reactantStoichiometry = reactant.getStoichiometry(); speciesAndStoichiometrySetFd.add(new 
StringDoublePair(reactantID, -reactantStoichiometry)); speciesAndStoichiometrySetRv.add(new StringDoublePair(reactantID, reactantStoichiometry)); reactantStoichiometrySetFd.add(new StringDoublePair(reactantID, reactantStoichiometry)); //as a reactant, this species affects the reaction in the forward direction speciesToAffectedReactionSetMap.get(reactantID).add(reactionID + "_fd"); //make sure there are enough molecules for this species //(in the reverse direction, molecules aren't subtracted, but added) if (variableToValueMap.get(reactantID) < reactantStoichiometry) notEnoughMoleculesFlagFd = true; } for (int a = 0; a < reaction.getNumProducts(); ++a) { SpeciesReference product = reaction.getProduct(a); String productID = product.getSpecies(); double productStoichiometry = product.getStoichiometry(); speciesAndStoichiometrySetFd.add(new StringDoublePair(productID, productStoichiometry)); speciesAndStoichiometrySetRv.add(new StringDoublePair(productID, -productStoichiometry)); reactantStoichiometrySetRv.add(new StringDoublePair(productID, productStoichiometry)); //as a product, this species affects the reaction in the reverse direction speciesToAffectedReactionSetMap.get(productID).add(reactionID + "_rv"); //make sure there are enough molecules for this species //(in the forward direction, molecules aren't subtracted, but added) if (variableToValueMap.get(productID) < productStoichiometry) notEnoughMoleculesFlagRv = true; } for (int a = 0; a < reaction.getNumModifiers(); ++a) { String modifierID = reaction.getModifier(a).getSpecies(); //as a modifier, this species affects the reaction (in both directions) speciesToAffectedReactionSetMap.get(modifierID).add(reactionID + "_fd"); speciesToAffectedReactionSetMap.get(modifierID).add(reactionID + "_rv"); } reactionToSpeciesAndStoichiometrySetMap.put(reactionID + "_fd", speciesAndStoichiometrySetFd); reactionToSpeciesAndStoichiometrySetMap.put(reactionID + "_rv", speciesAndStoichiometrySetRv); 
reactionToReactantStoichiometrySetMap.put(reactionID + "_fd", reactantStoichiometrySetFd); reactionToReactantStoichiometrySetMap.put(reactionID + "_rv", reactantStoichiometrySetRv); double propensity; //calculate forward reaction propensity if (notEnoughMoleculesFlagFd == true) propensity = 0.0; else { //the left child is what's left of the minus sign propensity = CalculatePropensity(reactionFormula.getLeftChild()); if (propensity < minPropensity && propensity > 0) minPropensity = propensity; else if (propensity > maxPropensity) maxPropensity = propensity; totalPropensity += propensity; } reactionToPropensityMap.put(reactionID + "_fd", propensity); reactionToFormulaMap.put(reactionID + "_fd", reactionFormula.getLeftChild()); //calculate reverse reaction propensity if (notEnoughMoleculesFlagRv == true) propensity = 0.0; else { //the right child is what's right of the minus sign propensity = CalculatePropensity(reactionFormula.getRightChild()); if (propensity < minPropensity && propensity > 0) minPropensity = propensity; else if (propensity > maxPropensity) maxPropensity = propensity; totalPropensity += propensity; } reactionToPropensityMap.put(reactionID + "_rv", propensity); reactionToFormulaMap.put(reactionID + "_rv", reactionFormula.getRightChild()); } //if it's not a reversible reaction else { //associate the reaction's reactants/products and their stoichiometries with the reaction ID HashSet<StringDoublePair> speciesAndStoichiometrySet = new HashSet<StringDoublePair>(); HashSet<StringDoublePair> reactantAndModifierStoichiometrySet = new HashSet<StringDoublePair>(); for (int a = 0; a < reaction.getNumReactants(); ++a) { SpeciesReference reactant = reaction.getReactant(a); String reactantID = reactant.getSpecies(); double reactantStoichiometry = reactant.getStoichiometry(); speciesAndStoichiometrySet.add(new StringDoublePair(reactantID, -reactantStoichiometry)); reactantAndModifierStoichiometrySet.add(new StringDoublePair(reactantID, reactantStoichiometry)); //as 
a reactant, this species affects the reaction speciesToAffectedReactionSetMap.get(reactantID).add(reactionID); //make sure there are enough molecules for this species if (variableToValueMap.get(reactantID) < reactantStoichiometry) notEnoughMoleculesFlag = true; } for (int a = 0; a < reaction.getNumProducts(); ++a) { SpeciesReference product = reaction.getProduct(a); speciesAndStoichiometrySet.add(new StringDoublePair(product.getSpecies(), product.getStoichiometry())); //don't need to check if there are enough, because products are added } for (int a = 0; a < reaction.getNumModifiers(); ++a) { String modifierID = reaction.getModifier(a).getSpecies(); //as a modifier, this species affects the reaction speciesToAffectedReactionSetMap.get(modifierID).add(reactionID); //modifiers don't have stoichiometry, so -1.0 is used reactantAndModifierStoichiometrySet.add(new StringDoublePair(modifierID, -1.0)); } reactionToSpeciesAndStoichiometrySetMap.put(reactionID, speciesAndStoichiometrySet); reactionToReactantStoichiometrySetMap.put(reactionID, reactantAndModifierStoichiometrySet); double propensity; if (notEnoughMoleculesFlag == true) propensity = 0.0; else { //calculate propensity propensity = CalculatePropensity(reactionFormula); if (propensity < minPropensity && propensity > 0) minPropensity = propensity; if (propensity > maxPropensity) maxPropensity = propensity; totalPropensity += propensity; } reactionToPropensityMap.put(reactionID, propensity); reactionToFormulaMap.put(reactionID, reactionFormula); } } } /** * creates the appropriate number of groups and associates reactions with groups */ private void CreateAndPopulateInitialGroups() { //create groups int currentGroup = 1; double groupPropensityCeiling = 2 * minPropensity; groupToPropensityFloorMap.put(1, minPropensity); while (groupPropensityCeiling < maxPropensity) { groupToPropensityCeilingMap.put(currentGroup, groupPropensityCeiling); groupToPropensityFloorMap.put(currentGroup + 1, groupPropensityCeiling); 
groupToMaxValueMap.put(currentGroup, 0.0); groupPropensityCeiling *= 2; ++currentGroup; } groupToPropensityCeilingMap.put(currentGroup, groupPropensityCeiling); groupToMaxValueMap.put(currentGroup, 0.0); numGroups = currentGroup + 1; //start at 0 to make a group for zero propensities for (int groupNum = 0; groupNum < numGroups; ++groupNum) { groupToReactionSetList.add(new HashSet<String>(500)); } //assign reactions to groups for (String reaction : reactionToPropensityMap.keySet()) { double propensity = reactionToPropensityMap.get(reaction); FRExpResultf frexpResult = FastMath.frexp((float) (propensity / minPropensity)); int group = frexpResult.exponent; //System.out.println(reaction + " " + propensity + " " + group); groupToReactionSetList.get(group).add(reaction); reactionToGroupMap.put(reaction, group); if (propensity > groupToMaxValueMap.get(group)) groupToMaxValueMap.put(group, propensity); } //find out which (if any) groups are empty //this is done so that empty groups are never chosen during simulation for (int groupNum = 1; groupNum < numGroups; ++groupNum) { if (groupToReactionSetList.get(groupNum).isEmpty()) continue; nonemptyGroupSet.add(groupNum); } } /** * * @param reactionFormula * @param reactionParameters * @return */ private double CalculatePropensity(ASTNode node) { // if (node == null) { // System.err.println(" is null"); // } // //these if/else-ifs before the else are leaf conditions //if it's a mathematical or logical constant if (node.isConstant()) { switch (node.getType()) { case CONSTANT_E: return Math.E; case CONSTANT_PI: return Math.PI; // case libsbml.AST_CONSTANT_TRUE: // return; // // case libsbml.AST_CONSTANT_FALSE: // return; } } //if it's an integer else if (node.isInteger()) return node.getInteger(); //if it's a non-integer else if (node.isReal()) return node.getReal(); //if it's a user-defined variable //eg, a species name or global/local parameter else if (node.isName()) return variableToValueMap.get(node.getName()); //not a leaf 
node else { ASTNode leftChild = node.getLeftChild(); ASTNode rightChild = node.getRightChild(); switch(node.getType()) { case PLUS: return (CalculatePropensity(leftChild) + CalculatePropensity(rightChild)); case MINUS: return (CalculatePropensity(leftChild) - CalculatePropensity(rightChild)); case TIMES: return (CalculatePropensity(leftChild) * CalculatePropensity(rightChild)); case DIVIDE: return (CalculatePropensity(leftChild) / CalculatePropensity(rightChild)); case FUNCTION_POWER: return (Math.pow(CalculatePropensity(leftChild), CalculatePropensity(rightChild))); } //end switch } return 0.0; } /** * assigns all reactions to (possibly new) groups * this is called when the minPropensity changes, which * changes the groups' floor/ceiling propensity values */ private void ReassignAllReactionsToGroups() { int currentGroup = 1; double groupPropensityCeiling = 2 * minPropensity; //re-calulate and store group propensity floors/ceilings groupToPropensityCeilingMap.clear(); groupToPropensityFloorMap.clear(); groupToPropensityFloorMap.put(1, minPropensity); while (groupPropensityCeiling < maxPropensity) { groupToPropensityCeilingMap.put(currentGroup, groupPropensityCeiling); groupToPropensityFloorMap.put(currentGroup + 1, groupPropensityCeiling); groupPropensityCeiling *= 2; ++currentGroup; } groupToPropensityCeilingMap.put(currentGroup, groupPropensityCeiling); int newNumGroups = currentGroup + 1; //allocate memory if the number of groups expands if (newNumGroups > numGroups) { for (int groupNum = numGroups; groupNum < newNumGroups; ++groupNum) groupToReactionSetList.add(new HashSet<String>(500)); } //clear the reaction set for each group //start at 1, as the zero propensity group isn't going to change for (int groupNum = 1; groupNum < numGroups; ++groupNum) { groupToReactionSetList.get(groupNum).clear(); groupToMaxValueMap.put(groupNum, 0.0); } numGroups = newNumGroups; //assign reactions to groups for (String reaction : reactionToPropensityMap.keySet()) { double 
propensity = reactionToPropensityMap.get(reaction); //the zero-propensity group doesn't need altering if (propensity == 0.0) continue; FRExpResultf frexpResult = FastMath.frexp((float) (propensity / minPropensity)); int group = frexpResult.exponent; groupToReactionSetList.get(group).add(reaction); reactionToGroupMap.put(reaction, group); if (propensity > groupToMaxValueMap.get(group)) groupToMaxValueMap.put(group, propensity); } //find out which (if any) groups are empty //this is done so that empty groups are never chosen during simulation nonemptyGroupSet.clear(); for (int groupNum = 1; groupNum < numGroups; ++groupNum) { if (groupToReactionSetList.get(groupNum).isEmpty()) continue; nonemptyGroupSet.add(groupNum); } } /** * class to combine a string and a double */ private class StringDoublePair { public String string; public double doub; StringDoublePair(String s, double d) { string = s; doub = d; } } } /* IMPLEMENTATION NOTES: if the top node of a reversible reaction isn't a minus sign, then give an error modifiers shouldn't determine whether ANY reaction fires --it's taken care of in the kinetic law i think you need to check, for a reversible reaction, which side(s) the modifier is in in the kinetic law --to determine if the modifer affects the reaction for the groupToReactionSetList, see if you can somehow create a hashset, then create an arraylist and each index is a Map.Entry (pointer) from the hashset, allowing you to maintain both simultaneously and easily (i think), and also accessing by index and hashkey in constant time. to get the map.entry you'll have to use a java hashset, but that may be worth it. look at the util sbml formula functions to see what happens with strings --i'm not sure this is still relevant OPTIMIZATION THINGS: look into final and static keywords get rid of the inner class (ie, make it non-inner)? */
gui/src/dynamicsim/DynamicGillespie.java
package dynamicsim; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import gnu.trove.iterator.TIntIterator; import gnu.trove.map.hash.TIntDoubleHashMap; import gnu.trove.map.hash.TObjectDoubleHashMap; import gnu.trove.map.hash.TObjectIntHashMap; import gnu.trove.set.hash.TIntHashSet; import javax.swing.JOptionPane; import javax.xml.stream.XMLStreamException; import main.Gui; import org.openmali.FastMath; import org.openmali.FastMath.FRExpResultf; import org.sbml.jsbml.ListOf; import org.sbml.jsbml.LocalParameter; import org.sbml.jsbml.Model; import org.sbml.jsbml.ASTNode; import org.sbml.jsbml.KineticLaw; import org.sbml.jsbml.Reaction; import org.sbml.jsbml.SBMLDocument; import org.sbml.jsbml.SBMLErrorLog; import org.sbml.jsbml.SBMLReader; import org.sbml.jsbml.SpeciesReference; public class DynamicGillespie { //SBML model private Model model = null; //generates random numbers based on the xorshift method XORShiftRandom randomNumberGenerator = null; private HashMap<String, Reaction> reactionToSBMLReactionMap = null; //allows for access to a propensity from a reaction ID private TObjectDoubleHashMap<String> reactionToPropensityMap = null; //allows for access to reactant/product speciesID and stoichiometry from a reaction ID //note that species and stoichiometries need to be thought of as unique for each reaction private HashMap<String, HashSet<StringDoublePair> > reactionToSpeciesAndStoichiometrySetMap = null; //allows for access to reactant/modifier speciesID and stoichiometry from a reaction ID private HashMap<String, HashSet<StringDoublePair> > reactionToReactantStoichiometrySetMap = null; //allows for access to a kinetic formula tree from a reaction private HashMap<String, ASTNode> reactionToFormulaMap = null; //allows for access to a group number from a reaction ID private TObjectIntHashMap<String> reactionToGroupMap = null; //allows for access to a set of 
reactions that a species is in (as a reactant or modifier) from a species ID private HashMap<String, HashSet<String> > speciesToAffectedReactionSetMap = null; //allows for access to species and parameter values from a variable ID private TObjectDoubleHashMap<String> variableToValueMap = null; //allows for access to a group's min/max propensity from a group ID private TIntDoubleHashMap groupToMaxValueMap = new TIntDoubleHashMap(50); //allows for access to the minimum/maximum possible propensity in the group from a group ID private TIntDoubleHashMap groupToPropensityFloorMap = new TIntDoubleHashMap(50); private TIntDoubleHashMap groupToPropensityCeilingMap = new TIntDoubleHashMap(50); //allows for access to the reactionIDs in a group from a group ID private ArrayList<HashSet<String> > groupToReactionSetList = new ArrayList<HashSet<String> >(50); //stores group numbers that are nonempty private TIntHashSet nonemptyGroupSet = new TIntHashSet(50); //number of groups including the empty groups and zero-propensity group private int numGroups = 0; //propensity variables double totalPropensity = 0.0; double minPropensity = Double.MAX_VALUE; double maxPropensity = Double.MIN_VALUE; public DynamicGillespie() { } /** * simulates the sbml model * * @param SBMLFileName * @param outputDirectory * @param timeLimit * @param maxTimeStep * @param randomSeed */ public void Simulate(String SBMLFileName, String outputDirectory, double timeLimit, double maxTimeStep, long randomSeed) { long timeBeforeSim = System.nanoTime(); //initialization will fail if the SBML model has errors try { if (!Initialize(SBMLFileName, outputDirectory, timeLimit, maxTimeStep, randomSeed)) return; } catch (FileNotFoundException e) { e.printStackTrace(); return; } catch (XMLStreamException e) { e.printStackTrace(); return; } System.err.println("initialization time: " + (System.nanoTime() - timeBeforeSim)/1e9f); //SIMULATION LOOP //simulate until the time limit is reached long step1Time = 0; long step2Time = 0; 
long step3aTime = 0; long step3bTime = 0; long step4Time = 0; long step5Time = 0; long step6Time = 0; double currentTime = 0.0; while (currentTime <= timeLimit) { //STEP 1: generate random numbers long step1Initial = System.nanoTime(); double r1 = randomNumberGenerator.nextDouble(); double r2 = randomNumberGenerator.nextDouble(); double r3 = randomNumberGenerator.nextDouble(); double r4 = randomNumberGenerator.nextDouble(); step1Time += System.nanoTime() - step1Initial; //STEP 2: calculate delta_t, the time till the next reaction execution long step2Initial = System.nanoTime(); double delta_t = Math.log(1 / r1) / totalPropensity; step2Time += System.nanoTime() - step2Initial; //System.err.println(totalPropensity + " " + currentTime + " " + delta_t + " "); //System.out.println("step 2: time is " + currentTime); //STEP 3A: select a group long step3aInitial = System.nanoTime(); //pick a random index, loop through the nonempty groups until that index is reached int randomIndex = (int) Math.floor(r2 * (nonemptyGroupSet.size() - 0.0000001)); int indexIter = 0; TIntIterator nonemptyGroupSetIterator = nonemptyGroupSet.iterator(); while (nonemptyGroupSetIterator.hasNext() && (indexIter < randomIndex)) { //System.out.println("step 3a"); nonemptyGroupSetIterator.next(); ++indexIter; } int selectedGroup = nonemptyGroupSetIterator.next(); //System.err.println(" index: " + randomIndex + " group: " + selectedGroup); step3aTime += System.nanoTime() - step3aInitial; //STEP 3B: select a reaction within the group long step3bInitial = System.nanoTime(); //the reaction hashset gets cast to an array in order to index into it randomly HashSet<String> reactionSet = groupToReactionSetList.get(selectedGroup); randomIndex = (int) Math.floor(r3 * reactionSet.size()); indexIter = 0; Iterator<String> reactionSetIterator = reactionSet.iterator(); while (reactionSetIterator.hasNext() && indexIter < randomIndex) { reactionSetIterator.next(); ++indexIter; } String selectedReactionID = 
reactionSetIterator.next(); double reactionPropensity = reactionToPropensityMap.get(selectedReactionID); //this is choosing a value between 0 and the max propensity in the group double randomPropensity = r4 * groupToMaxValueMap.get(selectedGroup); //loop until there's no reaction rejection //if the random propensity is higher than the selected reaction's propensity, another random reaction is chosen while (randomPropensity > reactionPropensity) { //System.out.println("step 3b"); //System.out.println(randomPropensity + " " + reactionPropensity); r4 = randomNumberGenerator.nextDouble(); randomIndex = (int) Math.floor(r4 * reactionSet.size()); indexIter = 0; reactionSetIterator = reactionSet.iterator(); while (reactionSetIterator.hasNext() && (indexIter < randomIndex)) { reactionSetIterator.next(); ++indexIter; } selectedReactionID = reactionSetIterator.next(); reactionPropensity = reactionToPropensityMap.get(selectedReactionID); randomPropensity = r4 * groupToMaxValueMap.get(selectedGroup); } step3bTime += System.nanoTime() - step3bInitial; //System.err.println("\nreaction fired: " + selectedReactionID + " propensity: " + reactionPropensity); //STEP 4: perform selected reaction and update species counts long step4Initial = System.nanoTime(); //set of all affected reactions that need propensity updating HashSet<String> totalAffectedReactionSet = new HashSet<String>(20); //loop through the reaction's reactants and products and update their amounts for (StringDoublePair speciesAndStoichiometry : reactionToSpeciesAndStoichiometrySetMap.get(selectedReactionID)) { //System.out.println("step 4"); double stoichiometry = speciesAndStoichiometry.doub; String speciesID = speciesAndStoichiometry.string; //System.out.println(selectedReactionID + " " + speciesID + " " + variableToValueMap.get(speciesID) + " " + stoichiometry); //update the species count variableToValueMap.adjustValue(speciesID, stoichiometry * delta_t); //System.out.println(" " + speciesID + " " + 
variableToValueMap.get(speciesID)); totalAffectedReactionSet.addAll(speciesToAffectedReactionSetMap.get(speciesID)); } // for (String reaction : reactionToFormulaMap.keySet()) // System.out.println("reactionToFormula Key: " + reaction); // for (String species : speciesToAffectedReactionSetMap.keySet()) // for (String reaction : speciesToAffectedReactionSetMap.get(species)) // System.out.println(species + " " + reaction); step4Time += System.nanoTime() - step4Initial; //STEP 5: compute affected reactions' new propensities and update total propensity //loop through the affected reactions and update the propensities for (String affectedReactionID : totalAffectedReactionSet) { //System.out.println("step 5"); //System.err.println(affectedReactionID + " " + model.getReaction(affectedReactionID.replace("_fd","").replace("_rv","")).getKineticLaw().getFormula()); //System.err.println(reactionToPropensityMap.get(affectedReactionID) + " " + totalAffectedReactionSet.size()); long step5Initial = System.nanoTime(); boolean notEnoughMoleculesFlag = false; HashSet<StringDoublePair> reactantStoichiometrySet = reactionToReactantStoichiometrySetMap.get(affectedReactionID); //check for enough molecules for the reaction to occur for (StringDoublePair speciesAndStoichiometry : reactantStoichiometrySet) { String speciesID = speciesAndStoichiometry.string; double stoichiometry = speciesAndStoichiometry.doub; //this means there aren't enough molecules to satisfy the stoichiometry if (variableToValueMap.get(speciesID) < stoichiometry) { notEnoughMoleculesFlag = true; break; } } double newPropensity = 0.0; if (notEnoughMoleculesFlag == true) newPropensity = 0.0; else newPropensity = CalculatePropensity(reactionToFormulaMap.get(affectedReactionID)); double oldPropensity = reactionToPropensityMap.get(affectedReactionID); //add the difference of new v. 
old propensity to the total propensity totalPropensity += newPropensity - oldPropensity; //System.err.println(String.valueOf(totalPropensity) + " " + String.valueOf(newPropensity - oldPropensity)); //System.err.println(affectedReactionID + ": " + oldPropensity + " -> " + newPropensity); reactionToPropensityMap.put(affectedReactionID, newPropensity); step5Time += System.nanoTime() - step5Initial; //STEP 6: re-assign affected reactions to appropriate groups long step6Initial = System.nanoTime(); int oldGroup = reactionToGroupMap.get(affectedReactionID); if (newPropensity == 0.0) { HashSet<String> oldReactionSet = groupToReactionSetList.get(oldGroup); //update group collections //zero propensities go into group 0 oldReactionSet.remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, 0); groupToReactionSetList.get(0).add(affectedReactionID); if (oldReactionSet.size() == 0) nonemptyGroupSet.remove(oldGroup); } else if (oldPropensity == 0.0) { int group; if (newPropensity <= minPropensity) { group = 1; //minPropensity = newPropensity; // groupToMaxValueMap.clear(); // groupToPropensityFloorMap.clear(); // groupToPropensityCeilingMap.clear(); // groupToReactionSetList = new ArrayList<HashSet<String> >(50); // nonemptyGroupSet.clear(); // // CreateAndPopulateInitialGroups(); } else { if (newPropensity > maxPropensity) maxPropensity = newPropensity; FRExpResultf frexpResult = FastMath.frexp((float) (newPropensity / minPropensity)); group = frexpResult.exponent; } if (group < numGroups) { HashSet<String> newReactionSet = groupToReactionSetList.get(group); //update group collections groupToReactionSetList.get(0).remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); newReactionSet.add(affectedReactionID); //if the group that the reaction was just added to is now nonempty if (newReactionSet.size() == 1) nonemptyGroupSet.add(group); if (newPropensity > groupToMaxValueMap.get(group)) groupToMaxValueMap.put(group, newPropensity); } //this 
means the propensity goes into a group that doesn't currently exist else { //groupToReactionSetList is a list, so the group needs to be the index for (int iter = numGroups; iter <= group; ++iter) groupToReactionSetList.add(new HashSet<String>(500)); numGroups = group + 1; //update group collections groupToReactionSetList.get(0).remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); groupToReactionSetList.get(group).add(affectedReactionID); nonemptyGroupSet.add(group); groupToMaxValueMap.put(group, newPropensity); } } else { if (newPropensity > groupToPropensityCeilingMap.get(oldGroup) || newPropensity < groupToPropensityFloorMap.get(oldGroup)) { int group; if (newPropensity <= minPropensity) { group = 1; //minPropensity = newPropensity; // groupToMaxValueMap.clear(); // groupToPropensityFloorMap.clear(); // groupToPropensityCeilingMap.clear(); // groupToReactionSetList = new ArrayList<HashSet<String> >(50); // nonemptyGroupSet.clear(); // // CreateAndPopulateInitialGroups(); } else { if (newPropensity > maxPropensity) maxPropensity = newPropensity; FRExpResultf frexpResult = FastMath.frexp((float) (newPropensity / minPropensity)); group = frexpResult.exponent; } if (group < numGroups) { HashSet<String> newReactionSet = groupToReactionSetList.get(group); HashSet<String> oldReactionSet = groupToReactionSetList.get(oldGroup); //update group collections oldReactionSet.remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); newReactionSet.add(affectedReactionID); //if the group that the reaction was just added to is now nonempty if (newReactionSet.size() == 1) nonemptyGroupSet.add(group); if (oldReactionSet.size() == 0) nonemptyGroupSet.remove(oldGroup); if (newPropensity > groupToMaxValueMap.get(group)) groupToMaxValueMap.put(group, newPropensity); } //this means the propensity goes into a group that doesn't currently exist else { //groupToReactionSetList is a list, so the group needs to be the index for (int iter = 
numGroups; iter <= group; ++iter) groupToReactionSetList.add(new HashSet<String>(500)); numGroups = group + 1; HashSet<String> oldReactionSet = groupToReactionSetList.get(oldGroup); //update group collections groupToReactionSetList.get(oldGroup).remove(affectedReactionID); reactionToGroupMap.put(affectedReactionID, group); groupToReactionSetList.get(group).add(affectedReactionID); nonemptyGroupSet.add(group); groupToMaxValueMap.put(group, newPropensity); if (oldReactionSet.size() == 0) nonemptyGroupSet.remove(oldGroup); } } else { //maintain current group; do nothing } } step6Time += System.nanoTime() - step6Initial; }//end step 5/6 for loop //update time: choose the smaller of delta_t and the given max timestep //by default, delta_t will always be chosen if (delta_t <= maxTimeStep) currentTime += delta_t; else currentTime += maxTimeStep; } //end simulation loop System.err.println("total time: " + String.valueOf((System.nanoTime() - timeBeforeSim)/1e9f)); System.err.println("total step 1 time: " + String.valueOf(step1Time/1e9f)); System.err.println("total step 2 time: " + String.valueOf(step2Time/1e9f)); System.err.println("total step 3a time: " + String.valueOf(step3aTime/1e9f)); System.err.println("total step 3b time: " + String.valueOf(step3bTime/1e9f)); System.err.println("total step 4 time: " + String.valueOf(step4Time/1e9f)); System.err.println("total step 5 time: " + String.valueOf(step5Time/1e9f)); System.err.println("total step 6 time: " + String.valueOf(step6Time/1e9f)); } /** * loads the model and initializes the maps and variables and whatnot * @throws XMLStreamException * @throws FileNotFoundException */ private boolean Initialize(String SBMLFileName, String outputDirectory, double timeLimit, double maxTimeStep, long randomSeed) throws FileNotFoundException, XMLStreamException { randomNumberGenerator = new XORShiftRandom(randomSeed); SBMLReader reader = new SBMLReader(); SBMLDocument document = reader.readSBML(SBMLFileName); SBMLErrorLog errors = 
document.getListOfErrors(); //if the sbml document has errors, tell the user and don't simulate if (document.getNumErrors() > 0) { String errorString = ""; for (int i = 0; i < errors.getNumErrors(); i++) { errorString += errors.getError(i); } JOptionPane.showMessageDialog(Gui.frame, "The SBML file contains " + document.getNumErrors() + " error(s):\n" + errorString, "SBML Error", JOptionPane.ERROR_MESSAGE); return false; } model = document.getModel(); long numSpecies = model.getNumSpecies(); long numParameters = model.getNumParameters(); long numReactions = model.getNumReactions(); //set initial capacities for collections (1.5 is used to multiply numReactions due to reversible reactions) speciesToAffectedReactionSetMap = new HashMap<String, HashSet<String> >((int) numSpecies); variableToValueMap = new TObjectDoubleHashMap<String>((int) numSpecies + (int) numParameters); reactionToPropensityMap = new TObjectDoubleHashMap<String>((int) (numReactions * 1.5)); reactionToSpeciesAndStoichiometrySetMap = new HashMap<String, HashSet<StringDoublePair> >((int) (numReactions * 1.5)); reactionToReactantStoichiometrySetMap = new HashMap<String, HashSet<StringDoublePair> >((int) (numReactions * 1.5)); reactionToFormulaMap = new HashMap<String, ASTNode>((int) (numReactions * 1.5)); reactionToGroupMap = new TObjectIntHashMap<String>((int) (numReactions * 1.5)); reactionToSBMLReactionMap = new HashMap<String, Reaction>((int) numReactions); //add values to hashmap for easy access to species amounts for (int i = 0; i < numSpecies; ++i) { variableToValueMap.put(model.getSpecies(i).getId(), model.getSpecies(i).getInitialAmount()); speciesToAffectedReactionSetMap.put(model.getSpecies(i).getId(), new HashSet<String>(20)); } //add values to hashmap for easy access to global parameter values //NOTE: the IDs for the parameters and species must be unique, so putting them in the //same hashmap is okay for (int i = 0; i < numParameters; ++i) { 
variableToValueMap.put(model.getParameter(i).getId(), model.getParameter(i).getValue()); } //STEP 0A: calculate initial propensities (including the total) CalculateInitialPropensities(numReactions); //STEP OB: create and populate initial groups CreateAndPopulateInitialGroups(); return true; } /** * calculates the initial propensities for each reaction in the model * * @param numReactions the number of reactions in the model */ private void CalculateInitialPropensities(long numReactions) { //loop through all reactions and calculate their propensities for (int i = 0; i < numReactions; ++i) { Reaction reaction = model.getReaction(i); String reactionID = reaction.getId(); KineticLaw reactionKineticLaw = reaction.getKineticLaw(); ASTNode reactionFormula = reactionKineticLaw.getMath(); ListOf<LocalParameter> reactionParameters = reactionKineticLaw.getListOfLocalParameters(); boolean notEnoughMoleculesFlagFd = false; boolean notEnoughMoleculesFlagRv = false; boolean notEnoughMoleculesFlag = false; reactionToSBMLReactionMap.put(reactionID, reaction); //put the local parameters into a hashmap for easy access //NOTE: these may overwrite some global parameters but that's fine, //because for each reaction the local parameters are the ones we want //and they're always defined for (int j = 0; j < reactionParameters.size(); ++j) { variableToValueMap.put(reactionParameters.get(j).getId(), reactionParameters.get(j).getValue()); } //if it's a reversible reaction //split into a forward and reverse reaction (based on the minus sign in the middle) //and calculate both propensities if (reaction.getReversible()) { //associate the reaction's reactants/products and their stoichiometries with the reaction ID //this is a reversible reaction, so the stoichiometries are switched for the reverse reaction HashSet<StringDoublePair> speciesAndStoichiometrySetFd = new HashSet<StringDoublePair>(); HashSet<StringDoublePair> speciesAndStoichiometrySetRv = new HashSet<StringDoublePair>(); 
HashSet<StringDoublePair> reactantStoichiometrySetFd = new HashSet<StringDoublePair>(); HashSet<StringDoublePair> reactantStoichiometrySetRv = new HashSet<StringDoublePair>(); for (int a = 0; a < reaction.getNumReactants(); ++a) { SpeciesReference reactant = reaction.getReactant(a); String reactantID = reactant.getSpecies(); double reactantStoichiometry = reactant.getStoichiometry(); speciesAndStoichiometrySetFd.add(new StringDoublePair(reactantID, -reactantStoichiometry)); speciesAndStoichiometrySetRv.add(new StringDoublePair(reactantID, reactantStoichiometry)); reactantStoichiometrySetFd.add(new StringDoublePair(reactantID, reactantStoichiometry)); //as a reactant, this species affects the reaction in the forward direction speciesToAffectedReactionSetMap.get(reactantID).add(reactionID + "_fd"); //make sure there are enough molecules for this species //(in the reverse direction, molecules aren't subtracted, but added) if (variableToValueMap.get(reactantID) < reactantStoichiometry) notEnoughMoleculesFlagFd = true; } for (int a = 0; a < reaction.getNumProducts(); ++a) { SpeciesReference product = reaction.getProduct(a); String productID = product.getSpecies(); double productStoichiometry = product.getStoichiometry(); speciesAndStoichiometrySetFd.add(new StringDoublePair(productID, productStoichiometry)); speciesAndStoichiometrySetRv.add(new StringDoublePair(productID, -productStoichiometry)); reactantStoichiometrySetRv.add(new StringDoublePair(productID, productStoichiometry)); //as a product, this species affects the reaction in the reverse direction speciesToAffectedReactionSetMap.get(productID).add(reactionID + "_rv"); //make sure there are enough molecules for this species //(in the forward direction, molecules aren't subtracted, but added) if (variableToValueMap.get(productID) < productStoichiometry) notEnoughMoleculesFlagRv = true; } for (int a = 0; a < reaction.getNumModifiers(); ++a) { String modifierID = reaction.getModifier(a).getSpecies(); //as a 
modifier, this species affects the reaction (in both directions) speciesToAffectedReactionSetMap.get(modifierID).add(reactionID + "_fd"); speciesToAffectedReactionSetMap.get(modifierID).add(reactionID + "_rv"); } reactionToSpeciesAndStoichiometrySetMap.put(reactionID + "_fd", speciesAndStoichiometrySetFd); reactionToSpeciesAndStoichiometrySetMap.put(reactionID + "_rv", speciesAndStoichiometrySetRv); reactionToReactantStoichiometrySetMap.put(reactionID + "_fd", reactantStoichiometrySetFd); reactionToReactantStoichiometrySetMap.put(reactionID + "_rv", reactantStoichiometrySetRv); double propensity; //calculate forward reaction propensity if (notEnoughMoleculesFlagFd == true) propensity = 0.0; else { //the left child is what's left of the minus sign propensity = CalculatePropensity(reactionFormula.getLeftChild()); if (propensity < minPropensity && propensity > 0) minPropensity = propensity; else if (propensity > maxPropensity) maxPropensity = propensity; totalPropensity += propensity; } reactionToPropensityMap.put(reactionID + "_fd", propensity); reactionToFormulaMap.put(reactionID + "_fd", reactionFormula.getLeftChild()); //calculate reverse reaction propensity if (notEnoughMoleculesFlagRv == true) propensity = 0.0; else { //the right child is what's right of the minus sign propensity = CalculatePropensity(reactionFormula.getRightChild()); if (propensity < minPropensity && propensity > 0) minPropensity = propensity; else if (propensity > maxPropensity) maxPropensity = propensity; totalPropensity += propensity; } reactionToPropensityMap.put(reactionID + "_rv", propensity); reactionToFormulaMap.put(reactionID + "_rv", reactionFormula.getRightChild()); } //if it's not a reversible reaction else { //associate the reaction's reactants/products and their stoichiometries with the reaction ID HashSet<StringDoublePair> speciesAndStoichiometrySet = new HashSet<StringDoublePair>(); HashSet<StringDoublePair> reactantAndModifierStoichiometrySet = new HashSet<StringDoublePair>(); 
for (int a = 0; a < reaction.getNumReactants(); ++a) { SpeciesReference reactant = reaction.getReactant(a); String reactantID = reactant.getSpecies(); double reactantStoichiometry = reactant.getStoichiometry(); speciesAndStoichiometrySet.add(new StringDoublePair(reactantID, -reactantStoichiometry)); reactantAndModifierStoichiometrySet.add(new StringDoublePair(reactantID, reactantStoichiometry)); //as a reactant, this species affects the reaction speciesToAffectedReactionSetMap.get(reactantID).add(reactionID); //make sure there are enough molecules for this species if (variableToValueMap.get(reactantID) < reactantStoichiometry) notEnoughMoleculesFlag = true; } for (int a = 0; a < reaction.getNumProducts(); ++a) { SpeciesReference product = reaction.getProduct(a); speciesAndStoichiometrySet.add(new StringDoublePair(product.getSpecies(), product.getStoichiometry())); //don't need to check if there are enough, because products are added } for (int a = 0; a < reaction.getNumModifiers(); ++a) { String modifierID = reaction.getModifier(a).getSpecies(); //as a modifier, this species affects the reaction speciesToAffectedReactionSetMap.get(modifierID).add(reactionID); //modifiers don't have stoichiometry, so -1.0 is used reactantAndModifierStoichiometrySet.add(new StringDoublePair(modifierID, -1.0)); } reactionToSpeciesAndStoichiometrySetMap.put(reactionID, speciesAndStoichiometrySet); reactionToReactantStoichiometrySetMap.put(reactionID, reactantAndModifierStoichiometrySet); double propensity; if (notEnoughMoleculesFlag == true) propensity = 0.0; else { //calculate propensity propensity = CalculatePropensity(reactionFormula); if (propensity < minPropensity && propensity > 0) minPropensity = propensity; if (propensity > maxPropensity) maxPropensity = propensity; totalPropensity += propensity; } reactionToPropensityMap.put(reactionID, propensity); reactionToFormulaMap.put(reactionID, reactionFormula); } } } /** * creates the appropriate number of groups and associates 
reactions with groups */ private void CreateAndPopulateInitialGroups() { //create groups int currentGroup = 1; double groupPropensityCeiling = 2 * minPropensity; groupToPropensityFloorMap.put(1, minPropensity); while (groupPropensityCeiling < maxPropensity) { groupToPropensityCeilingMap.put(currentGroup, groupPropensityCeiling); groupToPropensityFloorMap.put(currentGroup + 1, groupPropensityCeiling); groupToMaxValueMap.put(currentGroup, 0.0); groupPropensityCeiling *= 2; ++currentGroup; } groupToPropensityCeilingMap.put(currentGroup, groupPropensityCeiling); groupToMaxValueMap.put(currentGroup, 0.0); numGroups = currentGroup + 1; //start at 0 to make a group for zero propensities for (int j = 0; j < numGroups; ++j) { groupToReactionSetList.add(new HashSet<String>(500)); } //assign reactions to groups for (String reaction : reactionToPropensityMap.keySet()) { double propensity = reactionToPropensityMap.get(reaction); FRExpResultf frexpResult = FastMath.frexp((float) (propensity / minPropensity)); int group = frexpResult.exponent; //System.out.println(reaction + " " + propensity + " " + group); groupToReactionSetList.get(group).add(reaction); reactionToGroupMap.put(reaction, group); if (propensity > groupToMaxValueMap.get(group)) groupToMaxValueMap.put(group, propensity); } //find out which (if any) groups are empty //this is done so that empty groups are never chosen during simulation for (int groupNum = 1; groupNum < numGroups; ++groupNum) { if (groupToReactionSetList.get(groupNum).isEmpty()) continue; nonemptyGroupSet.add(groupNum); } } /** * * @param reactionFormula * @param reactionParameters * @return */ private double CalculatePropensity(ASTNode node) { // if (node == null) { // System.err.println(" is null"); // } // //these if/else-ifs before the else are leaf conditions //if it's a mathematical or logical constant if (node.isConstant()) { switch (node.getType()) { case CONSTANT_E: return Math.E; case CONSTANT_PI: return Math.PI; // case 
libsbml.AST_CONSTANT_TRUE: // return; // // case libsbml.AST_CONSTANT_FALSE: // return; } } //if it's an integer else if (node.isInteger()) return node.getInteger(); //if it's a non-integer else if (node.isReal()) return node.getReal(); //if it's a user-defined variable //eg, a species name or global/local parameter else if (node.isName()) return variableToValueMap.get(node.getName()); //not a leaf node else { ASTNode leftChild = node.getLeftChild(); ASTNode rightChild = node.getRightChild(); switch(node.getType()) { case PLUS: return (CalculatePropensity(leftChild) + CalculatePropensity(rightChild)); case MINUS: return (CalculatePropensity(leftChild) - CalculatePropensity(rightChild)); case TIMES: return (CalculatePropensity(leftChild) * CalculatePropensity(rightChild)); case DIVIDE: return (CalculatePropensity(leftChild) / CalculatePropensity(rightChild)); case FUNCTION_POWER: return (Math.pow(CalculatePropensity(leftChild), CalculatePropensity(rightChild))); } //end switch } return 0.0; } /** * class to combine a string and a double */ private class StringDoublePair { public String string; public double doub; StringDoublePair(String s, double d) { string = s; doub = d; } } } /* IMPLEMENTATION NOTES: don't forget to deal with new min propensity --re-do all group allocations unless there's an easier way time can go one step beyond the limit (shouldn't happen) if the top node of a reversible reaction isn't a minus sign, then give an error modifiers shouldn't determine whether ANY reaction fires --it's taken care of in the kinetic law i think you need to check, for a reversible reaction, which side(s) the modifier is in in the kinetic law --to determine if the modifer affects the reaction for the groupToReactionSetList, see if you can somehow create a hashset, then create an arraylist and each index is a Map.Entry (pointer) from the hashset, allowing you to maintain both simultaneously and easily (i think), and also accessing by index and hashkey in constant time. 
to get the map.entry you'll have to use a java hashset, but that may be worth it. look at the util sbml formula functions to see what happens with strings --i'm not sure this is still relevant OPTIMIZATION THINGS: look into final and static keywords get rid of the inner class (ie, make it non-inner)? */
groups, their reactions, and their boundaries change if the minimum propensity changes now
gui/src/dynamicsim/DynamicGillespie.java
groups, their reactions, and their boundaries change if the minimum propensity changes now
Java
apache-2.0
8d14dfbd4cfb59dc65ab0aa89aad91dc43badc79
0
josephcsible/GravityBox
/* * Copyright (C) 2015 The CyanogenMod Project * Copyright (C) 2015 Peter Gregus for GravityBox Project (C3C076@xda) * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ceco.marshmallow.gravitybox.quicksettings; import com.ceco.marshmallow.gravitybox.R; import de.robv.android.xposed.XSharedPreferences; import android.content.Context; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorManager; import android.view.Surface; import android.view.View; import android.view.WindowManager; import android.widget.ImageView; public class CompassTile extends QsTile implements SensorEventListener { private final static float ALPHA = 0.97f; private boolean mActive = false; private Float mNewDegree; private SensorManager mSensorManager; private Sensor mAccelerationSensor; private Sensor mGeomagneticFieldSensor; private WindowManager mWindowManager; private float[] mAcceleration; private float[] mGeomagnetic; private ImageView mImage; private boolean mListeningSensors; private int mCount; private boolean mUpdatePending; public CompassTile(Object host, String key, XSharedPreferences prefs, QsTileEventDistributor eventDistributor) throws Throwable { super(host, key, prefs, eventDistributor); mSensorManager = (SensorManager) mContext.getSystemService(Context.SENSOR_SERVICE); mAccelerationSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); mGeomagneticFieldSensor = 
mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); mWindowManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE); } @Override public void handleDestroy() { super.handleDestroy(); setListeningSensors(false); mSensorManager = null; mAccelerationSensor = null; mGeomagneticFieldSensor = null; mWindowManager = null; mImage = null; } @Override public void onCreateTileView(View tileView) throws Throwable { super.onCreateTileView(tileView); mImage = (ImageView) tileView.findViewById(android.R.id.icon); } @Override public boolean supportsHideOnChange() { return false; } @Override public void handleClick() { mActive = !mActive; refreshState(); setListeningSensors(mActive); super.handleClick(); } private void setListeningSensors(boolean listening) { if (listening == mListeningSensors) return; mListeningSensors = listening; if (mListeningSensors) { mCount = 10; mUpdatePending = false; mSensorManager.registerListener( this, mAccelerationSensor, SensorManager.SENSOR_DELAY_GAME); mSensorManager.registerListener( this, mGeomagneticFieldSensor, SensorManager.SENSOR_DELAY_GAME); } else { mSensorManager.unregisterListener(this); } } @Override public void handleUpdateState(Object state, Object arg) { mState.visible = true; if (mActive) { mState.icon = mGbContext.getDrawable(R.drawable.ic_qs_compass_on); if (mNewDegree != null) { mState.label = formatValueWithCardinalDirection(mNewDegree); float target = getBaseDegree() - mNewDegree; float relative = target - mImage.getRotation(); if (relative > 180) relative -= 360; mImage.setRotation(mImage.getRotation() + relative / 2); } else { mState.label = mGbContext.getString(R.string.quick_settings_compass_init); mImage.setRotation(0); } } else { mState.icon = mGbContext.getDrawable(R.drawable.ic_qs_compass_off); mState.label = mGbContext.getString(R.string.quick_settings_compass_off); mImage.setRotation(0); } mUpdatePending = false; super.handleUpdateState(state, arg); } @Override public void 
setListening(boolean listening) { if (!listening) { setListeningSensors(false); mActive = false; } } private float getBaseDegree() { switch (mWindowManager.getDefaultDisplay().getRotation()) { default: case Surface.ROTATION_0: return 360f; case Surface.ROTATION_90: return 270f; case Surface.ROTATION_180: return 180f; case Surface.ROTATION_270: return 90f; } } private String formatValueWithCardinalDirection(float degree) { int cardinalDirectionIndex = (int) (Math.floor(((degree - 22.5) % 360) / 45) + 1) % 8; String[] cardinalDirections = mGbContext.getResources().getStringArray( R.array.cardinal_directions); return mGbContext.getString(R.string.quick_settings_compass_value, degree, cardinalDirections[cardinalDirectionIndex]); } @Override public void onSensorChanged(SensorEvent event) { float[] values; if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { if (mAcceleration == null) { mAcceleration = event.values.clone(); } values = mAcceleration; } else { // Magnetic field sensor if (mGeomagnetic == null) { mGeomagnetic = event.values.clone(); } values = mGeomagnetic; } for (int i = 0; i < 3; i++) { values[i] = ALPHA * values[i] + (1 - ALPHA) * event.values[i]; } if (!mActive || !mListeningSensors || mUpdatePending || mAcceleration == null || mGeomagnetic == null) { // Nothing to do at this moment return; } if (mCount++ <= 10) { return; } mCount = 0; float R[] = new float[9]; float I[] = new float[9]; if (!SensorManager.getRotationMatrix(R, I, mAcceleration, mGeomagnetic)) { // Rotation matrix couldn't be calculated return; } // Get the current orientation float[] orientation = new float[3]; SensorManager.getOrientation(R, orientation); // Convert azimuth to degrees mNewDegree = Float.valueOf((float) Math.toDegrees(orientation[0])); mNewDegree = (mNewDegree + 360) % 360; mUpdatePending = true; refreshState(); } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { // noop } }
src/com/ceco/marshmallow/gravitybox/quicksettings/CompassTile.java
/* * Copyright (C) 2015 The CyanogenMod Project * Copyright (C) 2015 Peter Gregus for GravityBox Project (C3C076@xda) * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ceco.marshmallow.gravitybox.quicksettings; import com.ceco.marshmallow.gravitybox.R; import de.robv.android.xposed.XSharedPreferences; import android.content.Context; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorManager; import android.view.Surface; import android.view.View; import android.view.WindowManager; import android.widget.ImageView; public class CompassTile extends QsTile implements SensorEventListener { private final static float ALPHA = 0.97f; private boolean mActive = false; private Float mNewDegree; private SensorManager mSensorManager; private Sensor mAccelerationSensor; private Sensor mGeomagneticFieldSensor; private WindowManager mWindowManager; private float[] mAcceleration; private float[] mGeomagnetic; private ImageView mImage; private boolean mListeningSensors; public CompassTile(Object host, String key, XSharedPreferences prefs, QsTileEventDistributor eventDistributor) throws Throwable { super(host, key, prefs, eventDistributor); mSensorManager = (SensorManager) mContext.getSystemService(Context.SENSOR_SERVICE); mAccelerationSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); mGeomagneticFieldSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); 
mWindowManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE); } @Override public void handleDestroy() { super.handleDestroy(); setListeningSensors(false); mSensorManager = null; mAccelerationSensor = null; mGeomagneticFieldSensor = null; mWindowManager = null; mImage = null; } @Override public void onCreateTileView(View tileView) throws Throwable { super.onCreateTileView(tileView); mImage = (ImageView) tileView.findViewById(android.R.id.icon); } @Override public boolean supportsHideOnChange() { return false; } @Override public void handleClick() { mActive = !mActive; refreshState(); setListeningSensors(mActive); super.handleClick(); } private void setListeningSensors(boolean listening) { if (listening == mListeningSensors) return; mListeningSensors = listening; if (mListeningSensors) { mSensorManager.registerListener( this, mAccelerationSensor, SensorManager.SENSOR_DELAY_GAME); mSensorManager.registerListener( this, mGeomagneticFieldSensor, SensorManager.SENSOR_DELAY_GAME); } else { mSensorManager.unregisterListener(this); } } @Override public void handleUpdateState(Object state, Object arg) { mState.visible = true; if (mActive) { mState.icon = mGbContext.getDrawable(R.drawable.ic_qs_compass_on); if (mNewDegree != null) { mState.label = formatValueWithCardinalDirection(mNewDegree); float target = getBaseDegree() - mNewDegree; float relative = target - mImage.getRotation(); if (relative > 180) relative -= 360; mImage.setRotation(mImage.getRotation() + relative / 2); } else { mState.label = mGbContext.getString(R.string.quick_settings_compass_init); mImage.setRotation(0); } } else { mState.icon = mGbContext.getDrawable(R.drawable.ic_qs_compass_off); mState.label = mGbContext.getString(R.string.quick_settings_compass_off); mImage.setRotation(0); } super.handleUpdateState(state, arg); } @Override public void setListening(boolean listening) { if (!listening) { setListeningSensors(false); mActive = false; } } private float getBaseDegree() { switch 
(mWindowManager.getDefaultDisplay().getRotation()) { default: case Surface.ROTATION_0: return 360f; case Surface.ROTATION_90: return 270f; case Surface.ROTATION_180: return 180f; case Surface.ROTATION_270: return 90f; } } private String formatValueWithCardinalDirection(float degree) { int cardinalDirectionIndex = (int) (Math.floor(((degree - 22.5) % 360) / 45) + 1) % 8; String[] cardinalDirections = mGbContext.getResources().getStringArray( R.array.cardinal_directions); return mGbContext.getString(R.string.quick_settings_compass_value, degree, cardinalDirections[cardinalDirectionIndex]); } @Override public void onSensorChanged(SensorEvent event) { float[] values; if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { if (mAcceleration == null) { mAcceleration = event.values.clone(); } values = mAcceleration; } else { // Magnetic field sensor if (mGeomagnetic == null) { mGeomagnetic = event.values.clone(); } values = mGeomagnetic; } for (int i = 0; i < 3; i++) { values[i] = ALPHA * values[i] + (1 - ALPHA) * event.values[i]; } if (!mActive || !mListeningSensors || mAcceleration == null || mGeomagnetic == null) { // Nothing to do at this moment return; } float R[] = new float[9]; float I[] = new float[9]; if (!SensorManager.getRotationMatrix(R, I, mAcceleration, mGeomagnetic)) { // Rotation matrix couldn't be calculated return; } // Get the current orientation float[] orientation = new float[3]; SensorManager.getOrientation(R, orientation); // Convert azimuth to degrees mNewDegree = Float.valueOf((float) Math.toDegrees(orientation[0])); mNewDegree = (mNewDegree + 360) % 360; refreshState(); } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { // noop } }
QS: optimized Compass tile - wait for previous refresh to be finished to avoid clogging up tile's message queue resulting in QS panel stuttering - process every 10th sample to reduce refresh frequency
src/com/ceco/marshmallow/gravitybox/quicksettings/CompassTile.java
QS: optimized Compass tile - wait for previous refresh to be finished to avoid clogging up tile's message queue resulting in QS panel stuttering - process every 10th sample to reduce refresh frequency
Java
apache-2.0
25146d9ec20e33e16b421a9f9ed914eac4bf0668
0
Chicago/opengrid-svc-template
package org.opengrid.security.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.log4j.Logger; import org.bson.Document; import org.opengrid.data.MongoDBHelper; import org.opengrid.security.OpenGridUserRole; import org.springframework.security.authentication.AccountStatusUserDetailsChecker; import org.springframework.security.core.userdetails.UsernameNotFoundException; import com.mongodb.BasicDBObject; import com.mongodb.client.FindIterable; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; public class MongoUserService implements org.springframework.security.core.userdetails.UserDetailsService { private static final Logger log = Logger.getLogger(MongoUserService.class); private final AccountStatusUserDetailsChecker detailsChecker = new AccountStatusUserDetailsChecker(); private List<Document> allGroups = null; @Override public final User loadUserByUsername(String username) throws UsernameNotFoundException { final User user = getUser(username); if (user == null) { throw new UsernameNotFoundException("User not found"); } detailsChecker.check(user); return user; } private User getUser(String username) { MongoDBHelper ds = new MongoDBHelper(); try { MongoDatabase db = ds.getConnection(); MongoCollection<Document> c = db.getCollection(org.opengrid.constants.DB.USERS_COLLECTION_NAME); BasicDBObject q = new BasicDBObject(); q = new BasicDBObject("userId", new BasicDBObject("$eq", username)); FindIterable<Document> docs = c.find(q); Document doc = docs.first(); if (doc == null) { return null; } else { User u = new User(); u.setUsername(doc.get("userId").toString()); u.setFirstName(doc.get("firstName").toString()); u.setLastName(doc.get("lastName").toString()); //on the LDAP impl, we won't have this u.setPassword(doc.get("password").toString()); //read roles/groups //roles getRoles(u, (ArrayList) doc.get("groups"), db); //read 
subscriptions later return u; } } finally { if (ds !=null) { ds.closeConnection(); } } } private void getRoles(User u, ArrayList arrayList, MongoDatabase db) { Map<String, String> res = new HashMap<String, String>(); for(Object o : arrayList) { u.grantRole(new OpenGridUserRole( org.opengrid.constants.Security.AUTH_PREFIX_GROUP + (String) o)); //we need to get all resources accessible to this group ArrayList a = getGroupResources(db, (String) o); if (a !=null) { for(Object s : a) { if (!res.containsKey(s)) { res.put((String) s, (String) s); } } } //isAdmin is a system flag (there could be more special flags later depending on the need) if (isAdminGroup((String) o)) { String adminResourceString = org.opengrid.constants.Security.AUTH_PREFIX_RESOURCE + org.opengrid.constants.Security.ADMIN_AUTH; if (!res.containsKey(adminResourceString)) { u.grantRole( new OpenGridUserRole(adminResourceString) ); } } } //add resources to authorities using special prefix for(String s : res.keySet()) { u.grantRole(new OpenGridUserRole( org.opengrid.constants.Security.AUTH_PREFIX_RESOURCE + s)); } } private boolean isAdminGroup(String groupName) { //all ways re-read group info, as this may change a lot //if (allGroups == null) { allGroups = loadGroups(); //} for (Document o: allGroups) { if ( ( (String) o.get("groupId")).equals(groupName)) { boolean b = false; if (o.get("isAdmin") != null) { b = (Boolean) o.get("isAdmin"); } return b; } } return false; } private List<Document> loadGroups() { List<Document> l = new ArrayList<Document>(); MongoDBHelper ds = new MongoDBHelper(); MongoDatabase db = ds.getConnection(); try { MongoCollection<Document> c = db.getCollection(org.opengrid.constants.DB.GROUPS_COLLECTION_NAME); FindIterable<Document> cur = c.find(); MongoCursor<Document> it = cur.iterator(); while(it.hasNext()) { l.add(it.next()); } } finally { if (ds !=null) { ds.closeConnection(); } } return l; } private ArrayList getGroupResources(MongoDatabase db, String groupId) { 
MongoCollection<Document> c = db.getCollection(org.opengrid.constants.DB.GROUPS_COLLECTION_NAME); BasicDBObject q = new BasicDBObject(); q = new BasicDBObject("groupId", new BasicDBObject("$eq", groupId)); FindIterable<Document> docs = c.find(q); Document doc = docs.first(); if (doc !=null && doc.get("datasets") != null) return (ArrayList) doc.get("datasets"); return null; } public void addUser(User user) { //not implemented } }
opengridservice/src/main/java/org/opengrid/security/impl/MongoUserService.java
package org.opengrid.security.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.log4j.Logger; import org.bson.Document; import org.opengrid.data.MongoDBHelper; import org.opengrid.security.OpenGridUserRole; import org.springframework.security.authentication.AccountStatusUserDetailsChecker; import org.springframework.security.core.userdetails.UsernameNotFoundException; import com.mongodb.BasicDBObject; import com.mongodb.client.FindIterable; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; public class MongoUserService implements org.springframework.security.core.userdetails.UserDetailsService { private static final Logger log = Logger.getLogger(MongoUserService.class); private final AccountStatusUserDetailsChecker detailsChecker = new AccountStatusUserDetailsChecker(); private List<Document> allGroups = null; @Override public final User loadUserByUsername(String username) throws UsernameNotFoundException { final User user = getUser(username); if (user == null) { throw new UsernameNotFoundException("User not found"); } detailsChecker.check(user); return user; } private User getUser(String username) { MongoDBHelper ds = new MongoDBHelper(); try { MongoDatabase db = ds.getConnection(); MongoCollection<Document> c = db.getCollection(org.opengrid.constants.DB.USERS_COLLECTION_NAME); BasicDBObject q = new BasicDBObject(); q = new BasicDBObject("userId", new BasicDBObject("$eq", username)); FindIterable<Document> docs = c.find(q); Document doc = docs.first(); if (doc == null) { return null; } else { User u = new User(); u.setUsername(doc.get("userId").toString()); u.setFirstName(doc.get("firstName").toString()); u.setLastName(doc.get("lastName").toString()); //on the LDAP impl, we won't have this u.setPassword(doc.get("password").toString()); //read roles/groups //roles getRoles(u, (ArrayList) doc.get("groups"), db); //read 
subscriptions later return u; } } finally { if (ds !=null) { ds.closeConnection(); } } } private void getRoles(User u, ArrayList arrayList, MongoDatabase db) { Map<String, String> res = new HashMap<String, String>(); for(Object o : arrayList) { u.grantRole(new OpenGridUserRole( org.opengrid.constants.Security.AUTH_PREFIX_GROUP + (String) o)); //we need to get all resources accessible to this group ArrayList a = getGroupResources(db, (String) o); if (a !=null) { for(Object s : a) { if (!res.containsKey(s)) { res.put((String) s, (String) s); } } } //isAdmin is a system flag (there could be more special flags later depending on the need) if (isAdminGroup((String) o)) { String adminResourceString = org.opengrid.constants.Security.AUTH_PREFIX_RESOURCE + org.opengrid.constants.Security.ADMIN_AUTH; if (!res.containsKey(adminResourceString)) { u.grantRole( new OpenGridUserRole(adminResourceString) ); } } } //add resources to authorities using special prefix for(String s : res.keySet()) { u.grantRole(new OpenGridUserRole( org.opengrid.constants.Security.AUTH_PREFIX_RESOURCE + s)); } } private boolean isAdminGroup(String groupName) { //all ways re-read group info, as this may change a lot //if (allGroups == null) { allGroups = loadGroups(); //} for (Document o: allGroups) { if ( ( (String) o.get("groupId")).equals(groupName)) { boolean b = false; if (o.get("isAdmin") != null) { b = (Boolean) o.get("isAdmin"); } return b; } } return false; } private List<Document> loadGroups() { List<Document> l = new ArrayList<Document>(); MongoDBHelper ds = new MongoDBHelper(); MongoDatabase db = ds.getConnection(); try { MongoCollection<Document> c = db.getCollection(org.opengrid.constants.DB.GROUPS_COLLECTION_NAME); FindIterable<Document> cur = c.find(); MongoCursor<Document> it = cur.iterator(); while(it.hasNext()) { l.add(it.next()); } } finally { if (ds !=null) { ds.closeConnection(); } } return l; } private ArrayList getGroupResources(MongoDatabase db, String groupId) { 
MongoCollection<Document> c = db.getCollection(org.opengrid.constants.DB.GROUPS_COLLECTION_NAME); BasicDBObject q = new BasicDBObject(); q = new BasicDBObject("groupId", new BasicDBObject("$eq", groupId)); FindIterable<Document> docs = c.find(q); Document doc = docs.first(); return (ArrayList) doc.get("datasets"); } public void addUser(User user) { //not implemented } }
Checked for nulls in getGroupResources
opengridservice/src/main/java/org/opengrid/security/impl/MongoUserService.java
Checked for nulls in getGroupResources
Java
apache-2.0
34885ed03b47910ebad992504c0f1f6bc699b52f
0
sopel39/presto,Praveen2112/presto,dabaitu/presto,wyukawa/presto,fiedukow/presto,joy-yao/presto,hulu/presto,fiedukow/presto,ptkool/presto,treasure-data/presto,mvp/presto,svstanev/presto,tellproject/presto,Teradata/presto,lingochamp/presto,sumitkgec/presto,ptkool/presto,suyucs/presto,erichwang/presto,electrum/presto,hgschmie/presto,RobinUS2/presto,cberner/presto,twitter-forks/presto,ebd2/presto,Jimexist/presto,aramesh117/presto,prateek1306/presto,hulu/presto,haitaoyao/presto,stewartpark/presto,suyucs/presto,ebyhr/presto,albertocsm/presto,totticarter/presto,aramesh117/presto,troels/nz-presto,sunchao/presto,raghavsethi/presto,sumitkgec/presto,raghavsethi/presto,aglne/presto,idemura/presto,wrmsr/presto,svstanev/presto,hgschmie/presto,hgschmie/presto,y-lan/presto,smartnews/presto,hulu/presto,mbeitchman/presto,geraint0923/presto,takari/presto,ptkool/presto,geraint0923/presto,mvp/presto,hulu/presto,yuananf/presto,soz-fb/presto,ArturGajowy/presto,geraint0923/presto,prestodb/presto,kietly/presto,elonazoulay/presto,ocono-tech/presto,ocono-tech/presto,electrum/presto,facebook/presto,prateek1306/presto,nezihyigitbasi/presto,totticarter/presto,harunurhan/presto,y-lan/presto,mpilman/presto,Nasdaq/presto,hgschmie/presto,ptkool/presto,RobinUS2/presto,facebook/presto,soz-fb/presto,jf367/presto,twitter-forks/presto,zzhao0/presto,bloomberg/presto,11xor6/presto,mode/presto,aramesh117/presto,youngwookim/presto,losipiuk/presto,tomz/presto,Nasdaq/presto,idemura/presto,mode/presto,TeradataCenterForHadoop/bootcamp,miniway/presto,gh351135612/presto,elonazoulay/presto,prestodb/presto,jxiang/presto,prestodb/presto,miniway/presto,harunurhan/presto,electrum/presto,mvp/presto,RobinUS2/presto,nileema/presto,ocono-tech/presto,wrmsr/presto,martint/presto,dain/presto,prestodb/presto,smartnews/presto,kietly/presto,treasure-data/presto,jiangyifangh/presto,bloomberg/presto,mugglmenzel/presto,mode/presto,haozhun/presto,haozhun/presto,ipros-team/presto,albertocsm/presto,tomz/presto,arhimondr/presto,Yaliang
/presto,raghavsethi/presto,zhenyuy-fb/presto,tellproject/presto,zzhao0/presto,aglne/presto,damiencarol/presto,harunurhan/presto,sumitkgec/presto,cosinequanon/presto,soz-fb/presto,idemura/presto,shixuan-fan/presto,hgschmie/presto,yuananf/presto,martint/presto,ebd2/presto,totticarter/presto,EvilMcJerkface/presto,haozhun/presto,lingochamp/presto,mvp/presto,kietly/presto,EvilMcJerkface/presto,Jimexist/presto,Yaliang/presto,mpilman/presto,losipiuk/presto,smartnews/presto,cawallin/presto,RobinUS2/presto,dain/presto,damiencarol/presto,youngwookim/presto,nezihyigitbasi/presto,elonazoulay/presto,soz-fb/presto,propene/presto,ptkool/presto,jiangyifangh/presto,mvp/presto,Jimexist/presto,haitaoyao/presto,wyukawa/presto,svstanev/presto,propene/presto,twitter-forks/presto,sopel39/presto,shixuan-fan/presto,ipros-team/presto,dabaitu/presto,mbeitchman/presto,wrmsr/presto,nileema/presto,haozhun/presto,tomz/presto,martint/presto,EvilMcJerkface/presto,Teradata/presto,nezihyigitbasi/presto,albertocsm/presto,aleph-zero/presto,tomz/presto,ebyhr/presto,dabaitu/presto,troels/nz-presto,aramesh117/presto,youngwookim/presto,nileema/presto,sumitkgec/presto,erichwang/presto,miniway/presto,toyama0919/presto,svstanev/presto,mpilman/presto,treasure-data/presto,martint/presto,miniway/presto,jxiang/presto,zzhao0/presto,prateek1306/presto,haitaoyao/presto,stewartpark/presto,cawallin/presto,twitter-forks/presto,joy-yao/presto,toyama0919/presto,wyukawa/presto,tellproject/presto,sunchao/presto,arhimondr/presto,cberner/presto,troels/nz-presto,youngwookim/presto,sunchao/presto,jxiang/presto,sunchao/presto,mpilman/presto,ocono-tech/presto,arhimondr/presto,prateek1306/presto,TeradataCenterForHadoop/bootcamp,propene/presto,mpilman/presto,Zoomdata/presto,ebyhr/presto,11xor6/presto,Zoomdata/presto,electrum/presto,ArturGajowy/presto,mbeitchman/presto,facebook/presto,elonazoulay/presto,mugglmenzel/presto,lingochamp/presto,gh351135612/presto,wyukawa/presto,losipiuk/presto,idemura/presto,fiedukow/presto,wagnermarkd/
presto,sopel39/presto,wrmsr/presto,EvilMcJerkface/presto,damiencarol/presto,Teradata/presto,lingochamp/presto,ebd2/presto,ipros-team/presto,geraint0923/presto,fiedukow/presto,tellproject/presto,haitaoyao/presto,albertocsm/presto,prestodb/presto,suyucs/presto,youngwookim/presto,shixuan-fan/presto,aglne/presto,11xor6/presto,11xor6/presto,Jimexist/presto,miniway/presto,tellproject/presto,kietly/presto,prestodb/presto,shixuan-fan/presto,mbeitchman/presto,ArturGajowy/presto,mode/presto,mode/presto,mandusm/presto,mugglmenzel/presto,chrisunder/presto,losipiuk/presto,zhenyuy-fb/presto,raghavsethi/presto,Nasdaq/presto,zhenyuy-fb/presto,aramesh117/presto,nezihyigitbasi/presto,gh351135612/presto,Praveen2112/presto,y-lan/presto,jxiang/presto,mpilman/presto,ebd2/presto,Jimexist/presto,elonazoulay/presto,stewartpark/presto,wrmsr/presto,erichwang/presto,nileema/presto,aleph-zero/presto,damiencarol/presto,prateek1306/presto,Zoomdata/presto,gh351135612/presto,Praveen2112/presto,cosinequanon/presto,mandusm/presto,Yaliang/presto,wagnermarkd/presto,mugglmenzel/presto,erichwang/presto,cawallin/presto,takari/presto,raghavsethi/presto,Nasdaq/presto,yuananf/presto,cosinequanon/presto,nileema/presto,aleph-zero/presto,Praveen2112/presto,chrisunder/presto,arhimondr/presto,twitter-forks/presto,sopel39/presto,smartnews/presto,martint/presto,jf367/presto,propene/presto,joy-yao/presto,jxiang/presto,treasure-data/presto,dabaitu/presto,ebd2/presto,kietly/presto,arhimondr/presto,jf367/presto,bloomberg/presto,jiangyifangh/presto,zhenyuy-fb/presto,wagnermarkd/presto,Nasdaq/presto,cosinequanon/presto,ArturGajowy/presto,geraint0923/presto,y-lan/presto,Zoomdata/presto,takari/presto,jiangyifangh/presto,stewartpark/presto,ipros-team/presto,haitaoyao/presto,svstanev/presto,ebyhr/presto,losipiuk/presto,fiedukow/presto,mandusm/presto,harunurhan/presto,hulu/presto,yuananf/presto,zhenyuy-fb/presto,cberner/presto,troels/nz-presto,dain/presto,joy-yao/presto,harunurhan/presto,Teradata/presto,takari/presto,gh351135
612/presto,toyama0919/presto,sumitkgec/presto,suyucs/presto,zzhao0/presto,treasure-data/presto,ocono-tech/presto,facebook/presto,11xor6/presto,mbeitchman/presto,mugglmenzel/presto,totticarter/presto,y-lan/presto,propene/presto,aleph-zero/presto,Yaliang/presto,electrum/presto,ebyhr/presto,Praveen2112/presto,zzhao0/presto,chrisunder/presto,albertocsm/presto,nezihyigitbasi/presto,stewartpark/presto,wagnermarkd/presto,dain/presto,erichwang/presto,mandusm/presto,Yaliang/presto,toyama0919/presto,EvilMcJerkface/presto,aglne/presto,aleph-zero/presto,Teradata/presto,dabaitu/presto,tomz/presto,joy-yao/presto,RobinUS2/presto,dain/presto,cawallin/presto,ArturGajowy/presto,sopel39/presto,chrisunder/presto,smartnews/presto,cberner/presto,wyukawa/presto,aglne/presto,haozhun/presto,yuananf/presto,facebook/presto,cawallin/presto,mandusm/presto,bloomberg/presto,toyama0919/presto,cosinequanon/presto,suyucs/presto,chrisunder/presto,lingochamp/presto,jiangyifangh/presto,sunchao/presto,ipros-team/presto,bloomberg/presto,takari/presto,TeradataCenterForHadoop/bootcamp,soz-fb/presto,tellproject/presto,treasure-data/presto,Zoomdata/presto,TeradataCenterForHadoop/bootcamp,cberner/presto,totticarter/presto,idemura/presto,wagnermarkd/presto,shixuan-fan/presto,troels/nz-presto,wrmsr/presto,TeradataCenterForHadoop/bootcamp,damiencarol/presto,jf367/presto,jf367/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.execution; import com.facebook.presto.OutputBuffers; import com.facebook.presto.Session; import com.facebook.presto.execution.StateMachine.StateChangeListener; import com.facebook.presto.metadata.Split; import com.facebook.presto.spi.Node; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.StandardErrorCode; import com.facebook.presto.split.RemoteSplit; import com.facebook.presto.sql.planner.PlanFragment; import com.facebook.presto.sql.planner.plan.PlanFragmentId; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.facebook.presto.sql.planner.plan.RemoteSourceNode; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import javax.annotation.concurrent.ThreadSafe; import java.net.URI; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static com.facebook.presto.OutputBuffers.INITIAL_EMPTY_OUTPUT_BUFFERS; import static 
com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Sets.newConcurrentHashSet; import static io.airlift.concurrent.MoreFutures.firstCompletedFuture; import static io.airlift.http.client.HttpUriBuilder.uriBuilderFrom; import static java.util.Objects.requireNonNull; import static java.util.concurrent.CompletableFuture.completedFuture; @ThreadSafe public final class SqlStageExecution { private final StageStateMachine stateMachine; private final RemoteTaskFactory remoteTaskFactory; private final NodeTaskMap nodeTaskMap; private final Map<PlanFragmentId, RemoteSourceNode> exchangeSources; private final Map<Node, Set<RemoteTask>> tasks = new ConcurrentHashMap<>(); private final AtomicInteger nextTaskId = new AtomicInteger(); private final Set<TaskId> allTasks = newConcurrentHashSet(); private final Set<TaskId> finishedTasks = newConcurrentHashSet(); private final Multimap<PlanNodeId, URI> exchangeLocations = HashMultimap.create(); private final Set<PlanNodeId> completeSources = newConcurrentHashSet(); private final Set<PlanFragmentId> completeSourceFragments = newConcurrentHashSet(); private final AtomicReference<OutputBuffers> outputBuffers = new AtomicReference<>(INITIAL_EMPTY_OUTPUT_BUFFERS); public SqlStageExecution( StageId stageId, URI location, PlanFragment fragment, RemoteTaskFactory remoteTaskFactory, Session session, NodeTaskMap nodeTaskMap, ExecutorService executor) { this(new StageStateMachine( requireNonNull(stageId, "stageId is null"), requireNonNull(location, "location is null"), requireNonNull(session, "session is null"), requireNonNull(fragment, "fragment is null"), requireNonNull(executor, "executor is null")), remoteTaskFactory, nodeTaskMap); } public SqlStageExecution(StageStateMachine stateMachine, 
RemoteTaskFactory remoteTaskFactory, NodeTaskMap nodeTaskMap) { this.stateMachine = stateMachine; this.remoteTaskFactory = requireNonNull(remoteTaskFactory, "remoteTaskFactory is null"); this.nodeTaskMap = requireNonNull(nodeTaskMap, "nodeTaskMap is null"); ImmutableMap.Builder<PlanFragmentId, RemoteSourceNode> fragmentToExchangeSource = ImmutableMap.builder(); for (RemoteSourceNode remoteSourceNode : stateMachine.getFragment().getRemoteSourceNodes()) { for (PlanFragmentId planFragmentId : remoteSourceNode.getSourceFragmentIds()) { fragmentToExchangeSource.put(planFragmentId, remoteSourceNode); } } this.exchangeSources = fragmentToExchangeSource.build(); } public StageId getStageId() { return stateMachine.getStageId(); } public StageState getState() { return stateMachine.getState(); } public void addStateChangeListener(StateChangeListener<StageState> stateChangeListener) { stateMachine.addStateChangeListener(stateChangeListener::stateChanged); } public PlanFragment getFragment() { return stateMachine.getFragment(); } public void beginScheduling() { stateMachine.transitionToScheduling(); } public synchronized void transitionToSchedulingSplits() { stateMachine.transitionToSchedulingSplits(); } public synchronized void schedulingComplete() { if (!stateMachine.transitionToScheduled()) { return; } if (getAllTasks().stream().anyMatch(task -> getState() == StageState.RUNNING)) { stateMachine.transitionToRunning(); } if (finishedTasks.containsAll(allTasks)) { stateMachine.transitionToFinished(); } PlanNodeId partitionedSource = stateMachine.getFragment().getPartitionedSource(); if (partitionedSource != null) { for (RemoteTask task : getAllTasks()) { task.noMoreSplits(partitionedSource); } completeSources.add(partitionedSource); } } public synchronized void cancel() { stateMachine.transitionToCanceled(); getAllTasks().forEach(RemoteTask::cancel); } public synchronized void abort() { stateMachine.transitionToAborted(); getAllTasks().forEach(RemoteTask::abort); } public 
synchronized long getMemoryReservation() { return getAllTasks().stream() .mapToLong(task -> task.getTaskInfo().getStats().getMemoryReservation().toBytes()) .sum(); } public StageInfo getStageInfo() { return stateMachine.getStageInfo( () -> getAllTasks().stream() .map(RemoteTask::getTaskInfo) .collect(toImmutableList()), ImmutableList::of); } public synchronized void addExchangeLocation(ExchangeLocation exchangeLocation) { requireNonNull(exchangeLocation, "exchangeLocation is null"); RemoteSourceNode remoteSource = exchangeSources.get(exchangeLocation.getPlanFragmentId()); checkArgument(remoteSource != null, "Unknown remote source %s. Known sources are %s", exchangeLocation.getPlanFragmentId(), exchangeSources.keySet()); exchangeLocations.put(remoteSource.getId(), exchangeLocation.getUri()); for (RemoteTask task : getAllTasks()) { task.addSplits(remoteSource.getId(), ImmutableList.of(createRemoteSplitFor(task.getTaskInfo().getTaskId(), exchangeLocation.getUri()))); } } public synchronized void noMoreExchangeLocationsFor(PlanFragmentId fragmentId) { requireNonNull(fragmentId, "fragmentId is null"); RemoteSourceNode remoteSource = exchangeSources.get(fragmentId); checkArgument(remoteSource != null, "Unknown remote source %s. Known sources are %s", fragmentId, exchangeSources.keySet()); completeSourceFragments.add(fragmentId); // is the source now complete? 
if (completeSourceFragments.containsAll(remoteSource.getSourceFragmentIds())) { completeSources.add(remoteSource.getId()); for (RemoteTask task : getAllTasks()) { task.noMoreSplits(remoteSource.getId()); } } } public synchronized void setOutputBuffers(OutputBuffers outputBuffers) { requireNonNull(outputBuffers, "outputBuffers is null"); while (true) { OutputBuffers currentOutputBuffers = this.outputBuffers.get(); if (outputBuffers.getVersion() <= currentOutputBuffers.getVersion()) { return; } if (this.outputBuffers.compareAndSet(currentOutputBuffers, outputBuffers)) { for (RemoteTask task : getAllTasks()) { task.setOutputBuffers(outputBuffers); } return; } } } // do not synchronize // this is used for query info building which should be independent of scheduling work public boolean hasTasks() { return !tasks.isEmpty(); } public synchronized List<RemoteTask> getAllTasks() { return tasks.values().stream() .flatMap(Set::stream) .collect(toImmutableList()); } public synchronized CompletableFuture<?> getTaskStateChange() { List<RemoteTask> allTasks = getAllTasks(); if (allTasks.isEmpty()) { return completedFuture(null); } List<CompletableFuture<TaskInfo>> stateChangeFutures = allTasks.stream() .map(task -> task.getStateChange(task.getTaskInfo())) .collect(toImmutableList()); return firstCompletedFuture(stateChangeFutures, true); } public synchronized RemoteTask scheduleTask(Node node) { requireNonNull(node, "node is null"); return scheduleTask(node, null, ImmutableList.<Split>of()); } public synchronized Set<RemoteTask> scheduleSplits(Node node, Iterable<Split> splits) { requireNonNull(node, "node is null"); requireNonNull(splits, "splits is null"); PlanNodeId partitionedSource = stateMachine.getFragment().getPartitionedSource(); checkState(partitionedSource != null, "Partitioned source is null"); ImmutableSet.Builder<RemoteTask> newTasks = ImmutableSet.builder(); Collection<RemoteTask> tasks = this.tasks.get(node); if (tasks == null) { newTasks.add(scheduleTask(node, 
partitionedSource, splits)); } else { RemoteTask task = tasks.iterator().next(); task.addSplits(partitionedSource, splits); } return newTasks.build(); } private synchronized RemoteTask scheduleTask(Node node, PlanNodeId sourceId, Iterable<Split> sourceSplits) { TaskId taskId = new TaskId(stateMachine.getStageId(), String.valueOf(nextTaskId.getAndIncrement())); ImmutableMultimap.Builder<PlanNodeId, Split> initialSplits = ImmutableMultimap.builder(); for (Split sourceSplit : sourceSplits) { initialSplits.put(sourceId, sourceSplit); } for (Entry<PlanNodeId, URI> entry : exchangeLocations.entries()) { initialSplits.put(entry.getKey(), createRemoteSplitFor(taskId, entry.getValue())); } RemoteTask task = remoteTaskFactory.createRemoteTask( stateMachine.getSession(), taskId, node, stateMachine.getFragment(), initialSplits.build(), outputBuffers.get(), nodeTaskMap.getSplitCountChangeListener(node)); completeSources.forEach(task::noMoreSplits); allTasks.add(taskId); tasks.computeIfAbsent(node, key -> newConcurrentHashSet()).add(task); nodeTaskMap.addTask(node, task); task.addStateChangeListener(taskInfo -> { StageState stageState = getState(); if (stageState.isDone()) { return; } TaskState taskState = taskInfo.getState(); if (taskState == TaskState.FAILED) { RuntimeException failure = taskInfo.getFailures().stream() .findFirst() .map(ExecutionFailureInfo::toException) .orElse(new PrestoException(StandardErrorCode.INTERNAL_ERROR, "A task failed for an unknown reason")); stateMachine.transitionToFailed(failure); } else if (taskState == TaskState.ABORTED) { // A task should only be in the aborted state if the STAGE is done (ABORTED or FAILED) stateMachine.transitionToFailed(new PrestoException(StandardErrorCode.INTERNAL_ERROR, "A task is in the ABORTED state but stage is " + stageState)); } else if (taskState == TaskState.FINISHED) { finishedTasks.add(task.getTaskId()); } if (stageState == StageState.SCHEDULED || stageState == StageState.RUNNING) { if (taskState == 
TaskState.RUNNING) { stateMachine.transitionToRunning(); } if (finishedTasks.containsAll(allTasks)) { stateMachine.transitionToFinished(); } } }); if (!stateMachine.getState().isDone()) { task.start(); } else { // stage finished while we were scheduling this task task.abort(); } return task; } public void recordGetSplitTime(long start) { stateMachine.recordGetSplitTime(start); } private static Split createRemoteSplitFor(TaskId taskId, URI taskLocation) { URI splitLocation = uriBuilderFrom(taskLocation).appendPath("results").appendPath(taskId.toString()).build(); return new Split("remote", new RemoteSplit(splitLocation)); } @Override public String toString() { return stateMachine.toString(); } public static class ExchangeLocation { private final PlanFragmentId planFragmentId; private final URI uri; public ExchangeLocation(PlanFragmentId planFragmentId, URI uri) { this.planFragmentId = requireNonNull(planFragmentId, "planFragmentId is null"); this.uri = requireNonNull(uri, "uri is null"); } public PlanFragmentId getPlanFragmentId() { return planFragmentId; } public URI getUri() { return uri; } @Override public String toString() { return toStringHelper(this) .add("planFragmentId", planFragmentId) .add("uri", uri) .toString(); } } }
presto-main/src/main/java/com/facebook/presto/execution/SqlStageExecution.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.execution; import com.facebook.presto.OutputBuffers; import com.facebook.presto.Session; import com.facebook.presto.execution.StateMachine.StateChangeListener; import com.facebook.presto.metadata.Split; import com.facebook.presto.spi.Node; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.StandardErrorCode; import com.facebook.presto.split.RemoteSplit; import com.facebook.presto.sql.planner.PlanFragment; import com.facebook.presto.sql.planner.plan.PlanFragmentId; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.facebook.presto.sql.planner.plan.RemoteSourceNode; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import javax.annotation.concurrent.ThreadSafe; import java.net.URI; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static com.facebook.presto.OutputBuffers.INITIAL_EMPTY_OUTPUT_BUFFERS; import static 
com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Sets.newConcurrentHashSet; import static io.airlift.concurrent.MoreFutures.firstCompletedFuture; import static io.airlift.http.client.HttpUriBuilder.uriBuilderFrom; import static java.util.Objects.requireNonNull; import static java.util.concurrent.CompletableFuture.completedFuture; @ThreadSafe public final class SqlStageExecution { private final StageStateMachine stateMachine; private final RemoteTaskFactory remoteTaskFactory; private final NodeTaskMap nodeTaskMap; private final Map<PlanFragmentId, RemoteSourceNode> exchangeSources; private final Map<Node, Set<RemoteTask>> tasks = new ConcurrentHashMap<>(); private final AtomicInteger nextTaskId = new AtomicInteger(); private final Set<TaskId> allTasks = newConcurrentHashSet(); private final Set<TaskId> finishedTasks = newConcurrentHashSet(); private final Multimap<PlanNodeId, URI> exchangeLocations = HashMultimap.create(); private final Set<PlanNodeId> completeSources = newConcurrentHashSet(); private final Set<PlanFragmentId> completeSourceFragments = newConcurrentHashSet(); private final AtomicReference<OutputBuffers> outputBuffers = new AtomicReference<>(INITIAL_EMPTY_OUTPUT_BUFFERS); public SqlStageExecution( StageId stageId, URI location, PlanFragment fragment, RemoteTaskFactory remoteTaskFactory, Session session, NodeTaskMap nodeTaskMap, ExecutorService executor) { this(new StageStateMachine( requireNonNull(stageId, "stageId is null"), requireNonNull(location, "location is null"), requireNonNull(session, "session is null"), requireNonNull(fragment, "fragment is null"), requireNonNull(executor, "executor is null")), remoteTaskFactory, nodeTaskMap); } public SqlStageExecution(StageStateMachine stateMachine, 
RemoteTaskFactory remoteTaskFactory, NodeTaskMap nodeTaskMap) { this.stateMachine = stateMachine; this.remoteTaskFactory = requireNonNull(remoteTaskFactory, "remoteTaskFactory is null"); this.nodeTaskMap = requireNonNull(nodeTaskMap, "nodeTaskMap is null"); ImmutableMap.Builder<PlanFragmentId, RemoteSourceNode> fragmentToExchangeSource = ImmutableMap.builder(); for (RemoteSourceNode remoteSourceNode : stateMachine.getFragment().getRemoteSourceNodes()) { for (PlanFragmentId planFragmentId : remoteSourceNode.getSourceFragmentIds()) { fragmentToExchangeSource.put(planFragmentId, remoteSourceNode); } } this.exchangeSources = fragmentToExchangeSource.build(); } public StageId getStageId() { return stateMachine.getStageId(); } public StageState getState() { return stateMachine.getState(); } public void addStateChangeListener(StateChangeListener<StageState> stateChangeListener) { stateMachine.addStateChangeListener(stateChangeListener::stateChanged); } public PlanFragment getFragment() { return stateMachine.getFragment(); } public void beginScheduling() { stateMachine.transitionToScheduling(); } public synchronized void transitionToSchedulingSplits() { stateMachine.transitionToSchedulingSplits(); } public synchronized void schedulingComplete() { if (!stateMachine.transitionToScheduled()) { return; } if (getAllTasks().stream().anyMatch(task -> getState() == StageState.RUNNING)) { stateMachine.transitionToRunning(); } if (finishedTasks.containsAll(allTasks)) { stateMachine.transitionToFinished(); } PlanNodeId partitionedSource = stateMachine.getFragment().getPartitionedSource(); if (partitionedSource != null) { for (RemoteTask task : getAllTasks()) { task.noMoreSplits(partitionedSource); } completeSources.add(partitionedSource); } } public synchronized void cancel() { stateMachine.transitionToCanceled(); getAllTasks().forEach(RemoteTask::cancel); } public synchronized void abort() { stateMachine.transitionToAborted(); getAllTasks().forEach(RemoteTask::abort); } public 
synchronized long getMemoryReservation() { return getAllTasks().stream() .mapToLong(task -> task.getTaskInfo().getStats().getMemoryReservation().toBytes()) .sum(); } public StageInfo getStageInfo() { return stateMachine.getStageInfo( () -> getAllTasks().stream() .map(RemoteTask::getTaskInfo) .collect(toImmutableList()), ImmutableList::of); } public synchronized void addExchangeLocation(ExchangeLocation exchangeLocation) { requireNonNull(exchangeLocation, "exchangeLocation is null"); RemoteSourceNode remoteSource = exchangeSources.get(exchangeLocation.getPlanFragmentId()); checkArgument(remoteSource != null, "Unknown remote source %s. Known sources are %s", exchangeLocation.getPlanFragmentId(), exchangeSources.keySet()); exchangeLocations.put(remoteSource.getId(), exchangeLocation.getUri()); for (RemoteTask task : getAllTasks()) { task.addSplits(remoteSource.getId(), ImmutableList.of(createRemoteSplitFor(task.getTaskInfo().getTaskId(), exchangeLocation.getUri()))); } } public synchronized void noMoreExchangeLocationsFor(PlanFragmentId fragmentId) { requireNonNull(fragmentId, "fragmentId is null"); RemoteSourceNode remoteSource = exchangeSources.get(fragmentId); checkArgument(remoteSource != null, "Unknown remote source %s. Known sources are %s", fragmentId, exchangeSources.keySet()); completeSourceFragments.add(fragmentId); // is the source now complete? 
if (completeSourceFragments.containsAll(remoteSource.getSourceFragmentIds())) { completeSources.add(remoteSource.getId()); for (RemoteTask task : getAllTasks()) { task.noMoreSplits(remoteSource.getId()); } } } public synchronized void setOutputBuffers(OutputBuffers outputBuffers) { requireNonNull(outputBuffers, "outputBuffers is null"); while (true) { OutputBuffers currentOutputBuffers = this.outputBuffers.get(); if (outputBuffers.getVersion() <= currentOutputBuffers.getVersion()) { return; } if (this.outputBuffers.compareAndSet(currentOutputBuffers, outputBuffers)) { for (RemoteTask task : getAllTasks()) { task.setOutputBuffers(outputBuffers); } return; } } } // do not synchronize // this is used for query info building which should be independent of scheduling work public boolean hasTasks() { return !tasks.isEmpty(); } public synchronized List<RemoteTask> getAllTasks() { return tasks.values().stream() .flatMap(Set::stream) .collect(toImmutableList()); } public synchronized CompletableFuture<?> getTaskStateChange() { List<RemoteTask> allTasks = getAllTasks(); if (allTasks.isEmpty()) { return completedFuture(null); } List<CompletableFuture<TaskInfo>> stateChangeFutures = allTasks.stream() .map(task -> task.getStateChange(task.getTaskInfo())) .collect(toImmutableList()); return firstCompletedFuture(stateChangeFutures, true); } public synchronized RemoteTask scheduleTask(Node node) { requireNonNull(node, "node is null"); return scheduleTask(node, null, ImmutableList.<Split>of()); } public synchronized Set<RemoteTask> scheduleSplits(Node node, Iterable<Split> splits) { requireNonNull(node, "node is null"); requireNonNull(splits, "splits is null"); PlanNodeId partitionedSource = stateMachine.getFragment().getPartitionedSource(); checkState(partitionedSource != null, "Partitioned source is null"); ImmutableSet.Builder<RemoteTask> newTasks = ImmutableSet.builder(); Collection<RemoteTask> tasks = this.tasks.get(node); if (tasks == null) { newTasks.add(scheduleTask(node, 
partitionedSource, splits)); } else { RemoteTask task = tasks.iterator().next(); task.addSplits(partitionedSource, splits); } return newTasks.build(); } private synchronized RemoteTask scheduleTask(Node node, PlanNodeId sourceId, Iterable<Split> sourceSplits) { TaskId taskId = new TaskId(stateMachine.getStageId(), String.valueOf(nextTaskId.getAndIncrement())); ImmutableMultimap.Builder<PlanNodeId, Split> initialSplits = ImmutableMultimap.builder(); for (Split sourceSplit : sourceSplits) { initialSplits.put(sourceId, sourceSplit); } for (Entry<PlanNodeId, URI> entry : exchangeLocations.entries()) { initialSplits.put(entry.getKey(), createRemoteSplitFor(taskId, entry.getValue())); } RemoteTask task = remoteTaskFactory.createRemoteTask( stateMachine.getSession(), taskId, node, stateMachine.getFragment(), initialSplits.build(), outputBuffers.get(), nodeTaskMap.getSplitCountChangeListener(node)); completeSources.forEach(task::noMoreSplits); allTasks.add(taskId); tasks.computeIfAbsent(node, key -> newConcurrentHashSet()).add(task); nodeTaskMap.addTask(node, task); task.addStateChangeListener(taskInfo -> { StageState stageState = getState(); if (stageState.isDone()) { return; } TaskState taskState = taskInfo.getState(); if (taskState == TaskState.FAILED) { RuntimeException failure = taskInfo.getFailures().stream() .findFirst() .map(ExecutionFailureInfo::toException) .orElse(new PrestoException(StandardErrorCode.INTERNAL_ERROR, "A task failed for an unknown reason")); stateMachine.transitionToFailed(failure); } else if (taskState == TaskState.ABORTED) { // A task should only be in the aborted state if the STAGE is done (ABORTED or FAILED) stateMachine.transitionToFailed(new PrestoException(StandardErrorCode.INTERNAL_ERROR, "A task is in the ABORTED state but stage is " + stageState)); } else if (taskState == TaskState.FINISHED) { finishedTasks.add(task.getTaskId()); } if (stageState == StageState.SCHEDULED || stageState == StageState.RUNNING) { if (taskState == 
TaskState.RUNNING) { stateMachine.transitionToRunning(); } if (finishedTasks.containsAll(allTasks)) { stateMachine.transitionToFinished(); } } }); if (!stateMachine.getState().isDone()) { task.start(); } else { // stage finished while we were scheduling this task task.cancel(); } return task; } public void recordGetSplitTime(long start) { stateMachine.recordGetSplitTime(start); } private static Split createRemoteSplitFor(TaskId taskId, URI taskLocation) { URI splitLocation = uriBuilderFrom(taskLocation).appendPath("results").appendPath(taskId.toString()).build(); return new Split("remote", new RemoteSplit(splitLocation)); } @Override public String toString() { return stateMachine.toString(); } public static class ExchangeLocation { private final PlanFragmentId planFragmentId; private final URI uri; public ExchangeLocation(PlanFragmentId planFragmentId, URI uri) { this.planFragmentId = requireNonNull(planFragmentId, "planFragmentId is null"); this.uri = requireNonNull(uri, "uri is null"); } public PlanFragmentId getPlanFragmentId() { return planFragmentId; } public URI getUri() { return uri; } @Override public String toString() { return toStringHelper(this) .add("planFragmentId", planFragmentId) .add("uri", uri) .toString(); } } }
Fix task leak in coordinator Tasks are leaked when a stage completes before all tasks have been started
presto-main/src/main/java/com/facebook/presto/execution/SqlStageExecution.java
Fix task leak in coordinator
Java
apache-2.0
7151aae68225e7b099e31a6d6e781701a9532dfc
0
thisisvoa/mule-transport-netty-publisher,Ricston/mule-transport-netty-publisher
package com.ricston.nettypublisher; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.mule.api.annotations.Configurable; import org.mule.api.annotations.Connector; import org.mule.api.annotations.Processor; import org.mule.api.annotations.Source; import org.mule.api.annotations.lifecycle.Start; import org.mule.api.annotations.lifecycle.Stop; import org.mule.api.annotations.param.Default; import org.mule.api.callback.SourceCallback; import com.ricston.nettypublisher.exception.UnsupportedDataTypeException; import com.ricston.nettypublisher.handlers.NettyPublisherHandler; import com.ricston.nettypublisher.handlers.NettySourceHandler; /** * Netty connector. * * This connector is designed to host highly concurrent asynchronous TCP servers * using the Netty library. * * A new feature to perform pub/sub connections was also added. * * @author Ricston Ltd. */ @Connector(name = "nettypublisher", schemaVersion = "1.0-SNAPSHOT", friendlyName="Netty Publisher") public class NettyPublisherConnector { protected Log logger = LogFactory.getLog(getClass()); /** * Map of publisher names and their port numbers. Configurable from the XML. */ @Configurable private Map<String, Integer> publishers; /** * A NettyChannelInfo with a list of handlers for each publisher. Publishers are keyed by name. */ protected Map<String, NettyChannelInfo<NettyPublisherHandler>> publisherHandlers = new HashMap<String, NettyChannelInfo<NettyPublisherHandler>>(); /** * NettyChannelInfo that contains a list of source handles. 
*/ protected NettyChannelInfo<NettySourceHandler> sourceHandlers = new NettyChannelInfo<NettySourceHandler>(); protected boolean initialised = false; /** * * @return The map of publishers and their port numbers */ public Map<String, Integer> getPublishers() { return publishers; } /** * * @param publishers The map of publishers and their port numbers */ public void setPublishers(Map<String, Integer> publishers) { this.publishers = publishers; } /** * Start all pub/sub servers. * * @throws InterruptedException */ @Start public synchronized void init() throws InterruptedException { logger.info("Initialising Netty Publisher"); if (!initialised) { startPublisherServers(); initialised = true; logger.info("Netty Publisher initialised"); } else { logger.info("Netty Publisher was already initialised"); } } /** * Close all servers (inbound endpoints) and pub/sub servers * * @throws InterruptedException Exception while stopping servers */ @Stop public void destroy() throws InterruptedException { logger.info("Stopping Netty Publisher"); stopAllServers(); } /** * Start all pub/sub servers. 
* * @throws InterruptedException */ protected void startPublisherServers() throws InterruptedException { if (publishers != null) { for (Map.Entry<String, Integer> publisher : publishers.entrySet()) { // get the publisher name and port which would have been configured in the XML String publisherName = publisher.getKey(); Integer port = publisher.getValue(); // start the server as publisher NettyChannelInfo<NettyPublisherHandler> channelInfo = NettyUtils.startServer(port, ServerType.PUBLISHER, null, new ArrayList<NettyPublisherHandler>()); publisherHandlers.put(publisherName, channelInfo); logger.info("Netty server started publisher on port " + port); } } } /** * Close all servers (inbound endpoints) and pub/sub servers * @throws InterruptedException Exception while stopping */ protected void stopAllServers() throws InterruptedException { //close all servers (inbound endpoints) and associated connections sourceHandlers.closeAll(); logger.info("Netty server closing listener"); //close all publishers and the connections associated with each publisher for (Map.Entry<String, NettyChannelInfo<NettyPublisherHandler>> channelInfo : publisherHandlers.entrySet()) { channelInfo.getValue().closeAll(); logger.info("Netty server closing publisher"); } } /** * Publishes the data on all clients connected on the publisher. 
* * {@sample.xml ../../../doc/NettyPublisher-connector.xml.sample nettypublisher:publish} * * @param data Content to be published * @param publisher The publisher to publish the data on * @throws UnsupportedDataTypeException thrown when data type to be written is not supported */ @Processor public void publish(String publisher, @Default(value="#[payload]") String data) throws UnsupportedDataTypeException { List<NettyPublisherHandler> publishers = publisherHandlers.get(publisher).getChannelInboundHandlers(); for(NettyPublisherHandler publisherHandler : publishers) { publisherHandler.publish(data); } } /** * Hosts a netty server and starts listening on the configured port. * * {@sample.xml../../../doc/NettyPublisher-connector.xml.sample nettypublisher:server} * * @param callback the flow's message processors' callback * @param port The port number * @throws InterruptedException If interrupted, an exception is thrown */ @Source public void server(Integer port, SourceCallback callback) throws InterruptedException { NettyUtils.startServer(port, ServerType.SOURCE, callback, new ArrayList<NettySourceHandler>()); logger.info("Netty server started listening on port " + port); } /** * To be used as a TCP client. Writes data to a TCP server * * {@sample.xml../../../doc/NettyPublisher-connector.xml.sample nettypublisher:write} * @param host The host of the server * @param port The port of the server * @param data The data to be written * @throws Exception Anything that goes wrong */ @Processor public void write(String host, Integer port, @Default(value="#[message.payload]") String data) throws Exception { NettyClientHandler clientHandler = new NettyClientHandler(); NettyChannelInfo channelInfo = NettyUtils.startClient(host, port, clientHandler); clientHandler.writeToServer(data); clientHandler.close(); channelInfo.getWorkerGroup().shutdownGracefully(); } }
src/main/java/com/ricston/nettypublisher/NettyPublisherConnector.java
/** * This file was automatically generated by the Mule Development Kit */ package com.ricston.nettypublisher; import org.mule.api.ConnectionException; import org.mule.api.annotations.Configurable; import org.mule.api.annotations.Connect; import org.mule.api.annotations.ConnectionIdentifier; import org.mule.api.annotations.Connector; import org.mule.api.annotations.Disconnect; import org.mule.api.annotations.Processor; import org.mule.api.annotations.Source; import org.mule.api.annotations.ValidateConnection; import org.mule.api.annotations.lifecycle.Start; import org.mule.api.annotations.lifecycle.Stop; import org.mule.api.annotations.param.ConnectionKey; import org.mule.api.annotations.param.Default; import org.mule.api.annotations.param.Optional; import org.mule.api.callback.SourceCallback; import com.ricston.nettypublisher.exception.UnsupportedDataTypeException; import com.ricston.nettypublisher.handlers.NettyPublisherHandler; import com.ricston.nettypublisher.handlers.NettySourceHandler; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * Netty connector. * * This connector is designed to host highly concurrent asynchronous TCP servers * using the Netty library. * * A new feature to perform pub/sub connections was also added. * * @author Ricston Ltd. */ @Connector(name = "nettypublisher", schemaVersion = "1.0-SNAPSHOT", friendlyName="Netty Publisher") public class NettyPublisherConnector { protected Log logger = LogFactory.getLog(getClass()); /** * Map of publisher names and their port numbers. Configurable from the XML. */ @Configurable private Map<String, Integer> publishers; /** * For backward compatibility, ignored */ @Configurable private String nettyPublisherName; /** * A NettyChannelInfo with a list of handlers for each publisher. Publishers are keyed by name. 
*/ protected Map<String, NettyChannelInfo<NettyPublisherHandler>> publisherHandlers = new HashMap<String, NettyChannelInfo<NettyPublisherHandler>>(); /** * NettyChannelInfo that contains a list of source handles. */ protected NettyChannelInfo<NettySourceHandler> sourceHandlers = new NettyChannelInfo<NettySourceHandler>(); protected boolean initialised = false; /** * * @return The map of publishers and their port numbers */ public Map<String, Integer> getPublishers() { return publishers; } /** * * @param publishers The map of publishers and their port numbers */ public void setPublishers(Map<String, Integer> publishers) { this.publishers = publishers; } /** * * @return A name for this netty publisher */ public String getNettyPublisherName() { return nettyPublisherName; } /** * * @param nettyPublisherName A name for this netty publisher */ public void setNettyPublisherName(String nettyPublisherName) { this.nettyPublisherName = nettyPublisherName; } /** * @param nettyPublisherName Name of the connector * @throws ConnectionException */ //@Connect //public void connect(@ConnectionKey String nettyPublisherName) throws ConnectionException //{ //} /** * */ //@Disconnect //public void disconnect() //{ //} /** * Start all pub/sub servers. * * @throws InterruptedException */ @Start public synchronized void init() throws InterruptedException { logger.info("Initialising Netty Publisher"); if (!initialised) { startPublisherServers(); initialised = true; logger.info("Netty Publisher initialised"); } else { logger.info("Netty Publisher was already initialised"); } } /** * Close all servers (inbound endpoints) and pub/sub servers * @throws InterruptedException Exception while stopping servers */ @Stop public void destroy() throws InterruptedException { logger.info("Stopping Netty Publisher"); stopAllServers(); } /** * Start all pub/sub servers. 
* * @throws InterruptedException */ protected void startPublisherServers() throws InterruptedException { if (publishers != null) { for (Map.Entry<String, Integer> publisher : publishers.entrySet()) { // get the publisher name and port which would have been configured in the XML String publisherName = publisher.getKey(); Integer port = publisher.getValue(); // start the server as publisher NettyChannelInfo<NettyPublisherHandler> channelInfo = NettyUtils.startServer(port, ServerType.PUBLISHER, null, new ArrayList<NettyPublisherHandler>()); publisherHandlers.put(publisherName, channelInfo); logger.info("Netty server started publisher on port " + port); } } } /** * Close all servers (inbound endpoints) and pub/sub servers * @throws InterruptedException Exception while stopping */ protected void stopAllServers() throws InterruptedException { //close all servers (inbound endpoints) and associated connections sourceHandlers.closeAll(); logger.info("Netty server closing listener"); //close all publishers and the connections associated with each publisher for (Map.Entry<String, NettyChannelInfo<NettyPublisherHandler>> channelInfo : publisherHandlers.entrySet()) { channelInfo.getValue().closeAll(); logger.info("Netty server closing publisher"); } } /** * Are we connected */ //@ValidateConnection //public boolean isConnected() //{ // return true; //} /** * Are we connected */ //@ConnectionIdentifier //public String connectionId() //{ // return "001"; //} /** * Publishes the data on all clients connected on the publisher. 
* * {@sample.xml ../../../doc/NettyPublisher-connector.xml.sample nettypublisher:publish} * * @param data Content to be published * @param publisher The publisher to publish the data on * @throws UnsupportedDataTypeException thrown when data type to be written is not supported */ @Processor public void publish(String publisher, @Default(value="#[payload]") String data) throws UnsupportedDataTypeException { List<NettyPublisherHandler> publishers = publisherHandlers.get(publisher).getChannelInboundHandlers(); for(NettyPublisherHandler publisherHandler : publishers) { publisherHandler.publish(data); } } /** * Hosts a netty server and starts listening on the configured port. * * {@sample.xml../../../doc/NettyPublisher-connector.xml.sample nettypublisher:server} * * @param callback the flow's message processors' callback * @param port The port number * @throws InterruptedException If interrupted, an exception is thrown */ @Source public void server(Integer port, SourceCallback callback) throws InterruptedException { NettyUtils.startServer(port, ServerType.SOURCE, callback, new ArrayList<NettySourceHandler>()); logger.info("Netty server started listening on port " + port); } /** * To be used as a TCP client. Writes data to a TCP server * * {@sample.xml../../../doc/NettyPublisher-connector.xml.sample nettypublisher:write} * @param host The host of the server * @param port The port of the server * @param data The data to be written * @throws Exception Anything that goes wrong */ @Processor public void write(String host, Integer port, @Default(value="#[message.payload]") String data) throws Exception { NettyClientHandler clientHandler = new NettyClientHandler(); NettyChannelInfo channelInfo = NettyUtils.startClient(host, port, clientHandler); clientHandler.writeToServer(data); clientHandler.close(); channelInfo.getWorkerGroup().shutdownGracefully(); } }
Cleaning up publisher
src/main/java/com/ricston/nettypublisher/NettyPublisherConnector.java
Cleaning up publisher
Java
apache-2.0
673dc08b39a4543f9444692246e5eeb2ca267b12
0
andytsunami/DROID
package br.com.caelum.fragment; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.ListView; import android.widget.TextView; import br.com.caelum.cadastro.modelo.Prova; import com.caelum.cadastro.R; public class DetalhesProvaFragment extends Fragment { private Prova prova; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View layout = inflater.inflate(R.layout.provas_detalhe, container, false); if (getArguments() != null) { this.prova = (Prova) getArguments().getSerializable("prova"); } if (this.prova != null) { TextView materia = (TextView) layout .findViewById(R.id.detalhe_prova_materia); TextView data = (TextView) layout .findViewById(R.id.detalhe_prova_data); ListView topicos = (ListView) layout .findViewById(R.id.detalhe_prova_topicos); materia.setText(this.prova.getMateria()); data.setText(this.prova.getData()); ArrayAdapter<String> adapter = new ArrayAdapter<String>( getActivity(), android.R.layout.simple_list_item_1, prova.getTopicos()); topicos.setAdapter(adapter); } return layout; } }
CadastroCaelum/src/br/com/caelum/fragment/DetalhesProvaFragment.java
package br.com.caelum.fragment; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import br.com.caelum.cadastro.modelo.Prova; import com.caelum.cadastro.R; public class DetalhesProvaFragment extends Fragment { private Prova prova; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View layout = inflater.inflate(R.layout.provas_detalhe, container, false); return layout; } }
DROI3 - capitulo 04 - Atividade 20 - Mostrando os dados da prova na tela do tablet.
CadastroCaelum/src/br/com/caelum/fragment/DetalhesProvaFragment.java
DROI3 - capitulo 04 - Atividade 20 - Mostrando os dados da prova na tela do tablet.
Java
mit
f26c873bd9f8ae1dfc6e2da1335660dce970d63c
0
tschechlovdev/jabref,JabRef/jabref,bartsch-dev/jabref,motokito/jabref,tobiasdiez/jabref,Braunch/jabref,mredaelli/jabref,mairdl/jabref,motokito/jabref,obraliar/jabref,oscargus/jabref,jhshinn/jabref,shitikanth/jabref,grimes2/jabref,mredaelli/jabref,tschechlovdev/jabref,tobiasdiez/jabref,jhshinn/jabref,Mr-DLib/jabref,Mr-DLib/jabref,obraliar/jabref,tobiasdiez/jabref,JabRef/jabref,zellerdev/jabref,mredaelli/jabref,Mr-DLib/jabref,zellerdev/jabref,sauliusg/jabref,Braunch/jabref,zellerdev/jabref,bartsch-dev/jabref,shitikanth/jabref,oscargus/jabref,oscargus/jabref,Braunch/jabref,zellerdev/jabref,grimes2/jabref,bartsch-dev/jabref,Siedlerchr/jabref,ayanai1/jabref,zellerdev/jabref,tobiasdiez/jabref,jhshinn/jabref,mredaelli/jabref,sauliusg/jabref,mairdl/jabref,sauliusg/jabref,mairdl/jabref,ayanai1/jabref,obraliar/jabref,shitikanth/jabref,motokito/jabref,jhshinn/jabref,bartsch-dev/jabref,Siedlerchr/jabref,oscargus/jabref,tschechlovdev/jabref,Siedlerchr/jabref,tschechlovdev/jabref,grimes2/jabref,motokito/jabref,motokito/jabref,JabRef/jabref,Mr-DLib/jabref,obraliar/jabref,ayanai1/jabref,mredaelli/jabref,Mr-DLib/jabref,Braunch/jabref,tschechlovdev/jabref,JabRef/jabref,Siedlerchr/jabref,mairdl/jabref,ayanai1/jabref,shitikanth/jabref,grimes2/jabref,grimes2/jabref,sauliusg/jabref,shitikanth/jabref,jhshinn/jabref,Braunch/jabref,oscargus/jabref,ayanai1/jabref,obraliar/jabref,mairdl/jabref,bartsch-dev/jabref
/* Copyright (C) 2003 David Weitzman, Morten O. Alver All programs in this directory and subdirectories are published under the GNU General Public License as described below. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Further information about the GNU GPL is available at: http://www.gnu.org/copyleft/gpl.ja.html Note: Modified for use in JabRef */ package net.sf.jabref.model; import net.sf.jabref.*; import net.sf.jabref.logic.l10n.Localization; import net.sf.jabref.logic.util.MonthUtil; import java.beans.PropertyVetoException; import java.beans.VetoableChangeListener; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; public class BibtexDatabase { private final Map<String, BibtexEntry> entries = new ConcurrentHashMap<>(); private String preamble; private final Map<String, BibtexString> bibtexStrings = new ConcurrentHashMap<>(); private final Set<DatabaseChangeListener> changeListeners = new HashSet<>(); private boolean followCrossrefs = true; private static final Log LOGGER = LogFactory.getLog(BibtexDatabase.class); /** * use a map instead of a set since i need to know 
how many of each key is * inthere */ private final HashMap<String, Integer> allKeys = new HashMap<>(); /* * Entries are stored in a HashMap with the ID as key. What happens if * someone changes a BibtexEntry's ID after it has been added to this * BibtexDatabase? The key of that entry would be the old ID, not the new * one. Use a PropertyChangeListener to identify an ID change and update the * Map. */ private final VetoableChangeListener listener = propertyChangeEvent -> { if (propertyChangeEvent.getPropertyName() == null) { fireDatabaseChanged(new DatabaseChangeEvent(BibtexDatabase.this, DatabaseChangeEvent.ChangeType.CHANGING_ENTRY, (BibtexEntry) propertyChangeEvent.getSource())); } else if ("id".equals(propertyChangeEvent.getPropertyName())) { // locate the entry under its old key BibtexEntry oldEntry = entries.remove(propertyChangeEvent.getOldValue()); if (oldEntry != propertyChangeEvent.getSource()) { // Something is very wrong! // The entry under the old key isn't // the one that sent this event. // Restore the old state. entries.put((String) propertyChangeEvent.getOldValue(), oldEntry); throw new PropertyVetoException("Wrong old ID", propertyChangeEvent); } if (entries.get(propertyChangeEvent.getNewValue()) != null) { entries.put((String) propertyChangeEvent.getOldValue(), oldEntry); throw new PropertyVetoException ("New ID already in use, please choose another", propertyChangeEvent); } // and re-file this entry entries.put((String) propertyChangeEvent.getNewValue(), (BibtexEntry) propertyChangeEvent.getSource()); } else { fireDatabaseChanged(new DatabaseChangeEvent(BibtexDatabase.this, DatabaseChangeEvent.ChangeType.CHANGED_ENTRY, (BibtexEntry) propertyChangeEvent.getSource())); } }; /** * Returns the number of entries. */ public int getEntryCount() { return entries.size(); } /** * Returns a Set containing the keys to all entries. * Use getKeySet().iterator() to iterate over all entries. 
*/ public Set<String> getKeySet() { return entries.keySet(); } /** * Returns an EntrySorter with the sorted entries from this base, * sorted by the given Comparator. */ public synchronized EntrySorter getSorter(Comparator<BibtexEntry> comp) { EntrySorter sorter = new EntrySorter(entries, comp); addDatabaseChangeListener(sorter); return sorter; } /** * Just temporary, for testing purposes.... * * @return */ public Map<String, BibtexEntry> getEntryMap() { return entries; } /** * Returns the entry with the given ID (-> entry_type + hashcode). */ public BibtexEntry getEntryById(String id) { return entries.get(id); } public Collection<BibtexEntry> getEntries() { return entries.values(); } /** * Returns the entry with the given bibtex key. */ public synchronized BibtexEntry getEntryByKey(String key) { BibtexEntry back = null; int keyHash = key.hashCode(); // key hash for better performance Set<String> keySet = entries.keySet(); for (String entrieID : keySet) { BibtexEntry entry = getEntryById(entrieID); if (entry != null && entry.getCiteKey() != null) { String citeKey = entry.getCiteKey(); if (citeKey != null) { if (keyHash == citeKey.hashCode()) { back = entry; } } } } return back; } public synchronized BibtexEntry[] getEntriesByKey(String key) { ArrayList<BibtexEntry> entries = new ArrayList<BibtexEntry>(); for (BibtexEntry entry : this.entries.values()) { if (key.equals(entry.getCiteKey())) { entries.add(entry); } } return entries.toArray(new BibtexEntry[entries.size()]); } /** * Inserts the entry, given that its ID is not already in use. * use Util.createId(...) to make up a unique ID for an entry. 
*/ public synchronized boolean insertEntry(BibtexEntry entry) throws KeyCollisionException { String id = entry.getId(); if (getEntryById(id) != null) { throw new KeyCollisionException( "ID is already in use, please choose another"); } entry.addPropertyChangeListener(listener); entries.put(id, entry); fireDatabaseChanged(new DatabaseChangeEvent(this, DatabaseChangeEvent.ChangeType.ADDED_ENTRY, entry)); return checkForDuplicateKeyAndAdd(null, entry.getCiteKey()); } /** * Removes the entry with the given string. * <p> * Returns null if not found. */ public synchronized BibtexEntry removeEntry(String id) { BibtexEntry oldValue = entries.remove(id); if (oldValue == null) { return null; } removeKeyFromSet(oldValue.getCiteKey()); oldValue.removePropertyChangeListener(listener); fireDatabaseChanged(new DatabaseChangeEvent(this, DatabaseChangeEvent.ChangeType.REMOVED_ENTRY, oldValue)); return oldValue; } public synchronized boolean setCiteKeyForEntry(String id, String key) { if (!entries.containsKey(id)) { return false; // Entry doesn't exist! } BibtexEntry entry = getEntryById(id); String oldKey = entry.getCiteKey(); if (key != null) { entry.setField(BibtexFields.KEY_FIELD, key); } else { entry.clearField(BibtexFields.KEY_FIELD); } return checkForDuplicateKeyAndAdd(oldKey, entry.getCiteKey()); } /** * Sets the database's preamble. */ public synchronized void setPreamble(String preamble) { this.preamble = preamble; } /** * Returns the database's preamble. */ public synchronized String getPreamble() { return preamble; } /** * Inserts a Bibtex String at the given index. 
*/ public synchronized void addString(BibtexString string) throws KeyCollisionException { if (hasStringLabel(string.getName())) { throw new KeyCollisionException(Localization.lang("A string with this label already exists")); } if (bibtexStrings.containsKey(string.getId())) { throw new KeyCollisionException("Duplicate BibtexString id."); } bibtexStrings.put(string.getId(), string); } /** * Removes the string at the given index. */ public void removeString(String id) { bibtexStrings.remove(id); } /** * Returns a Set of keys to all BibtexString objects in the database. * These are in no sorted order. */ public Set<String> getStringKeySet() { return bibtexStrings.keySet(); } /** * Returns a Collection of all BibtexString objects in the database. * These are in no particular order. */ public Collection<BibtexString> getStringValues() { return bibtexStrings.values(); } /** * Returns the string at the given index. */ public BibtexString getString(String o) { return bibtexStrings.get(o); } /** * Returns the number of strings. */ public int getStringCount() { return bibtexStrings.size(); } /** * Returns true if a string with the given label already exists. */ public synchronized boolean hasStringLabel(String label) { for (BibtexString value : bibtexStrings.values()) { if (value.getName().equals(label)) { return true; } } return false; } /** * Resolves any references to strings contained in this field content, * if possible. */ public String resolveForStrings(String content) { if (content == null) { throw new IllegalArgumentException("Content for resolveForStrings must not be null."); } return resolveContent(content, new HashSet<String>()); } /** * Take the given collection of BibtexEntry and resolve any string * references. * * @param entries A collection of BibtexEntries in which all strings of the form * #xxx# will be resolved against the hash map of string * references stored in the databasee. 
* @param inPlace If inPlace is true then the given BibtexEntries will be modified, if false then copies of the BibtexEntries are made before resolving the strings. * @return a list of bibtexentries, with all strings resolved. It is dependent on the value of inPlace whether copies are made or the given BibtexEntries are modified. */ public List<BibtexEntry> resolveForStrings(Collection<BibtexEntry> entries, boolean inPlace) { if (entries == null) { throw new NullPointerException(); } List<BibtexEntry> results = new ArrayList<BibtexEntry>(entries.size()); for (BibtexEntry entry : entries) { results.add(this.resolveForStrings(entry, inPlace)); } return results; } /** * Take the given BibtexEntry and resolve any string references. * * @param entry A BibtexEntry in which all strings of the form #xxx# will be * resolved against the hash map of string references stored in * the databasee. * @param inPlace If inPlace is true then the given BibtexEntry will be * modified, if false then a copy is made using close made before * resolving the strings. * @return a BibtexEntry with all string references resolved. It is * dependent on the value of inPlace whether a copy is made or the * given BibtexEntries is modified. */ public BibtexEntry resolveForStrings(BibtexEntry entry, boolean inPlace) { if (!inPlace) { entry = (BibtexEntry) entry.clone(); } for (Object field : entry.getAllFields()) { entry.setField(field.toString(), this.resolveForStrings(entry.getField(field.toString()))); } return entry; } /** * If the label represents a string contained in this database, returns * that string's content. Resolves references to other strings, taking * care not to follow a circular reference pattern. * If the string is undefined, returns null. 
*/ private String resolveString(String label, HashSet<String> usedIds) { for (BibtexString string : bibtexStrings.values()) { //Util.pr(label+" : "+string.getName()); if (string.getName().toLowerCase().equals(label.toLowerCase())) { // First check if this string label has been resolved // earlier in this recursion. If so, we have a // circular reference, and have to stop to avoid // infinite recursion. if (usedIds.contains(string.getId())) { LOGGER.info("Stopped due to circular reference in strings: " + label); return label; } // If not, log this string's ID now. usedIds.add(string.getId()); // Ok, we found the string. Now we must make sure we // resolve any references to other strings in this one. String res = string.getContent(); res = resolveContent(res, usedIds); // Finished with recursing this branch, so we remove our // ID again: usedIds.remove(string.getId()); return res; } } // If we get to this point, the string has obviously not been defined locally. // Check if one of the standard BibTeX month strings has been used: MonthUtil.Month month = MonthUtil.getMonthByShortName(label); if (month.isValid()) { return month.fullName; } else { return null; } } private String resolveContent(String res, HashSet<String> usedIds) { //if (res.matches(".*#[-\\^\\:\\w]+#.*")) { if (res.matches(".*#[^#]+#.*")) { StringBuilder newRes = new StringBuilder(); int piv = 0; int next; while ((next = res.indexOf("#", piv)) >= 0) { // We found the next string ref. Append the text // up to it. if (next > 0) { newRes.append(res.substring(piv, next)); } int stringEnd = res.indexOf("#", next + 1); if (stringEnd >= 0) { // We found the boundaries of the string ref, // now resolve that one. String refLabel = res.substring(next + 1, stringEnd); String resolved = resolveString(refLabel, usedIds); if (resolved == null) { // Could not resolve string. 
Display the # // characters rather than removing them: newRes.append(res.substring(next, stringEnd + 1)); } else { // The string was resolved, so we display its meaning only, // stripping the # characters signifying the string label: newRes.append(resolved); } piv = stringEnd + 1; } else { // We didn't find the boundaries of the string ref. This // makes it impossible to interpret it as a string label. // So we should just append the rest of the text and finish. newRes.append(res.substring(next)); piv = res.length(); break; } } if (piv < res.length() - 1) { newRes.append(res.substring(piv)); } res = newRes.toString(); } return res; } //########################################## // usage: // isDuplicate=checkForDuplicateKeyAndAdd( null, b.getKey() , issueDuplicateWarning); //############################################ // if the newkey already exists and is not the same as oldkey it will give a warning // else it will add the newkey to the to set and remove the oldkey private boolean checkForDuplicateKeyAndAdd(String oldKey, String newKey) { // Globals.logger(" checkForDuplicateKeyAndAdd [oldKey = " + oldKey + "] [newKey = " + newKey + "]"); boolean duplicate; if (oldKey == null) {// this is a new entry so don't bother removing oldKey duplicate = addKeyToSet(newKey); } else { if (oldKey.equals(newKey)) {// were OK because the user did not change keys duplicate = false; } else {// user changed the key // removed the oldkey // But what if more than two have the same key? // this means that user can add another key and would not get a warning! // consider this: i add a key xxx, then i add another key xxx . I get a warning. I delete the key xxx. JBM // removes this key from the allKey. then I add another key xxx. I don't get a warning! 
// i need a way to count the number of keys of each type // hashmap=>int (increment each time) removeKeyFromSet(oldKey); duplicate = addKeyToSet(newKey); } } if (duplicate) { LOGGER.warn(Localization.lang("Warning there is a duplicate key") + ':' + newKey); } return duplicate; } /** * Returns the number of occurences of the given key in this database. */ public int getNumberOfKeyOccurences(String key) { Object o = allKeys.get(key); if (o == null) { return 0; } else { return (Integer) o; } } //======================================================== // keep track of all the keys to warn if there are duplicates //======================================================== private boolean addKeyToSet(String key) { boolean exists = false; if (key == null || key.isEmpty()) { return false;//don't put empty key } if (allKeys.containsKey(key)) { // warning exists = true; allKeys.put(key, allKeys.get(key) + 1);// incrementInteger( allKeys.get(key))); } else { allKeys.put(key, 1); } return exists; } //======================================================== // reduce the number of keys by 1. 
if this number goes to zero then remove from the set // note: there is a good reason why we should not use a hashset but use hashmap instead //======================================================== private void removeKeyFromSet(String key) { if (key == null || key.isEmpty()) { return; } if (allKeys.containsKey(key)) { Integer tI = allKeys.get(key); // if(allKeys.get(key) instanceof Integer) if (tI == 1) { allKeys.remove(key); } else { allKeys.put(key, tI - 1);//decrementInteger( tI )); } } } private void fireDatabaseChanged(DatabaseChangeEvent e) { for (DatabaseChangeListener listener : changeListeners) { listener.databaseChanged(e); } } public void addDatabaseChangeListener(DatabaseChangeListener l) { changeListeners.add(l); } public void removeDatabaseChangeListener(DatabaseChangeListener l) { changeListeners.remove(l); } /** * Returns the text stored in the given field of the given bibtex entry * which belongs to the given database. * <p> * If a database is given, this function will try to resolve any string * references in the field-value. * Also, if a database is given, this function will try to find values for * unset fields in the entry linked by the "crossref" field, if any. * * @param field The field to return the value of. * @param bibtex maybenull * The bibtex entry which contains the field. * @param database maybenull * The database of the bibtex entry. * @return The resolved field value or null if not found. */ public static String getResolvedField(String field, BibtexEntry bibtex, BibtexDatabase database) { if (field.equals("bibtextype")) { return bibtex.getType().getName(); } // TODO: Changed this to also consider alias fields, which is the expected // behavior for the preview layout and for the check whatever all fields are present. // But there might be unwanted side-effects?! 
Object o = bibtex.getFieldOrAlias(field); // If this field is not set, and the entry has a crossref, try to look up the // field in the referred entry: Do not do this for the bibtex key. if (o == null && database != null && database.followCrossrefs && !field.equals(BibtexFields.KEY_FIELD)) { Object crossRef = bibtex.getField("crossref"); if (crossRef != null) { BibtexEntry referred = database.getEntryByKey((String) crossRef); if (referred != null) { // Ok, we found the referred entry. Get the field value from that // entry. If it is unset there, too, stop looking: o = referred.getField(field); } } } return BibtexDatabase.getText((String) o, database); } /** * Returns a text with references resolved according to an optionally given * database. * * @param toResolve maybenull The text to resolve. * @param database maybenull The database to use for resolving the text. * @return The resolved text or the original text if either the text or the database are null */ public static String getText(String toResolve, BibtexDatabase database) { if (toResolve != null && database != null) { return database.resolveForStrings(toResolve); } return toResolve; } public void setFollowCrossrefs(boolean followCrossrefs) { this.followCrossrefs = followCrossrefs; } }
src/main/java/net/sf/jabref/model/BibtexDatabase.java
/* Copyright (C) 2003 David Weitzman, Morten O. Alver All programs in this directory and subdirectories are published under the GNU General Public License as described below. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Further information about the GNU GPL is available at: http://www.gnu.org/copyleft/gpl.ja.html Note: Modified for use in JabRef */ package net.sf.jabref.model; import net.sf.jabref.*; import net.sf.jabref.logic.l10n.Localization; import net.sf.jabref.logic.util.MonthUtil; import java.beans.PropertyVetoException; import java.beans.VetoableChangeListener; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; public class BibtexDatabase { private final Map<String, BibtexEntry> entries = new ConcurrentHashMap<>(); private String preamble; private final Map<String, BibtexString> bibtexStrings = new ConcurrentHashMap<>(); private final Set<DatabaseChangeListener> changeListeners = new HashSet<>(); private boolean followCrossrefs = true; private static final Log LOGGER = LogFactory.getLog(BibtexDatabase.class); /** * use a map instead of 
a set since i need to know how many of each key is * inthere */ private final HashMap<String, Integer> allKeys = new HashMap<>(); /* * Entries are stored in a HashMap with the ID as key. What happens if * someone changes a BibtexEntry's ID after it has been added to this * BibtexDatabase? The key of that entry would be the old ID, not the new * one. Use a PropertyChangeListener to identify an ID change and update the * Map. */ private final VetoableChangeListener listener = propertyChangeEvent -> { if (propertyChangeEvent.getPropertyName() == null) { fireDatabaseChanged(new DatabaseChangeEvent(BibtexDatabase.this, DatabaseChangeEvent.ChangeType.CHANGING_ENTRY, (BibtexEntry) propertyChangeEvent.getSource())); } else if ("id".equals(propertyChangeEvent.getPropertyName())) { // locate the entry under its old key BibtexEntry oldEntry = entries.remove(propertyChangeEvent.getOldValue()); if (oldEntry != propertyChangeEvent.getSource()) { // Something is very wrong! // The entry under the old key isn't // the one that sent this event. // Restore the old state. entries.put((String) propertyChangeEvent.getOldValue(), oldEntry); throw new PropertyVetoException("Wrong old ID", propertyChangeEvent); } if (entries.get(propertyChangeEvent.getNewValue()) != null) { entries.put((String) propertyChangeEvent.getOldValue(), oldEntry); throw new PropertyVetoException ("New ID already in use, please choose another", propertyChangeEvent); } // and re-file this entry entries.put((String) propertyChangeEvent.getNewValue(), (BibtexEntry) propertyChangeEvent.getSource()); } else { fireDatabaseChanged(new DatabaseChangeEvent(BibtexDatabase.this, DatabaseChangeEvent.ChangeType.CHANGED_ENTRY, (BibtexEntry) propertyChangeEvent.getSource())); } }; /** * Returns the number of entries. */ public synchronized int getEntryCount() { return entries.size(); } /** * Returns a Set containing the keys to all entries. * Use getKeySet().iterator() to iterate over all entries. 
*/ public synchronized Set<String> getKeySet() { return entries.keySet(); } /** * Returns an EntrySorter with the sorted entries from this base, * sorted by the given Comparator. */ public synchronized EntrySorter getSorter(Comparator<BibtexEntry> comp) { EntrySorter sorter = new EntrySorter(entries, comp); addDatabaseChangeListener(sorter); return sorter; } /** * Just temporary, for testing purposes.... * * @return */ public Map<String, BibtexEntry> getEntryMap() { return entries; } /** * Returns the entry with the given ID (-> entry_type + hashcode). */ public synchronized BibtexEntry getEntryById(String id) { return entries.get(id); } public synchronized Collection<BibtexEntry> getEntries() { return entries.values(); } /** * Returns the entry with the given bibtex key. */ public synchronized BibtexEntry getEntryByKey(String key) { BibtexEntry back = null; int keyHash = key.hashCode(); // key hash for better performance Set<String> keySet = entries.keySet(); for (String entrieID : keySet) { BibtexEntry entry = getEntryById(entrieID); if (entry != null && entry.getCiteKey() != null) { String citeKey = entry.getCiteKey(); if (citeKey != null) { if (keyHash == citeKey.hashCode()) { back = entry; } } } } return back; } public synchronized BibtexEntry[] getEntriesByKey(String key) { ArrayList<BibtexEntry> entries = new ArrayList<BibtexEntry>(); for (BibtexEntry entry : this.entries.values()) { if (key.equals(entry.getCiteKey())) { entries.add(entry); } } return entries.toArray(new BibtexEntry[entries.size()]); } /** * Inserts the entry, given that its ID is not already in use. * use Util.createId(...) to make up a unique ID for an entry. 
*/ public synchronized boolean insertEntry(BibtexEntry entry) throws KeyCollisionException { String id = entry.getId(); if (getEntryById(id) != null) { throw new KeyCollisionException( "ID is already in use, please choose another"); } entry.addPropertyChangeListener(listener); entries.put(id, entry); fireDatabaseChanged(new DatabaseChangeEvent(this, DatabaseChangeEvent.ChangeType.ADDED_ENTRY, entry)); return checkForDuplicateKeyAndAdd(null, entry.getCiteKey()); } /** * Removes the entry with the given string. * <p> * Returns null if not found. */ public synchronized BibtexEntry removeEntry(String id) { BibtexEntry oldValue = entries.remove(id); if (oldValue == null) { return null; } removeKeyFromSet(oldValue.getCiteKey()); oldValue.removePropertyChangeListener(listener); fireDatabaseChanged(new DatabaseChangeEvent(this, DatabaseChangeEvent.ChangeType.REMOVED_ENTRY, oldValue)); return oldValue; } public synchronized boolean setCiteKeyForEntry(String id, String key) { if (!entries.containsKey(id)) { return false; // Entry doesn't exist! } BibtexEntry entry = getEntryById(id); String oldKey = entry.getCiteKey(); if (key != null) { entry.setField(BibtexFields.KEY_FIELD, key); } else { entry.clearField(BibtexFields.KEY_FIELD); } return checkForDuplicateKeyAndAdd(oldKey, entry.getCiteKey()); } /** * Sets the database's preamble. */ public synchronized void setPreamble(String preamble) { this.preamble = preamble; } /** * Returns the database's preamble. */ public synchronized String getPreamble() { return preamble; } /** * Inserts a Bibtex String at the given index. 
*/ public synchronized void addString(BibtexString string) throws KeyCollisionException { if (hasStringLabel(string.getName())) { throw new KeyCollisionException(Localization.lang("A string with this label already exists")); } if (bibtexStrings.containsKey(string.getId())) { throw new KeyCollisionException("Duplicate BibtexString id."); } bibtexStrings.put(string.getId(), string); } /** * Removes the string at the given index. */ public synchronized void removeString(String id) { bibtexStrings.remove(id); } /** * Returns a Set of keys to all BibtexString objects in the database. * These are in no sorted order. */ public Set<String> getStringKeySet() { return bibtexStrings.keySet(); } /** * Returns a Collection of all BibtexString objects in the database. * These are in no particular order. */ public Collection<BibtexString> getStringValues() { return bibtexStrings.values(); } /** * Returns the string at the given index. */ public synchronized BibtexString getString(String o) { return bibtexStrings.get(o); } /** * Returns the number of strings. */ public synchronized int getStringCount() { return bibtexStrings.size(); } /** * Returns true if a string with the given label already exists. */ public synchronized boolean hasStringLabel(String label) { for (BibtexString value : bibtexStrings.values()) { if (value.getName().equals(label)) { return true; } } return false; } /** * Resolves any references to strings contained in this field content, * if possible. */ public String resolveForStrings(String content) { if (content == null) { throw new IllegalArgumentException("Content for resolveForStrings must not be null."); } return resolveContent(content, new HashSet<String>()); } /** * Take the given collection of BibtexEntry and resolve any string * references. * * @param entries A collection of BibtexEntries in which all strings of the form * #xxx# will be resolved against the hash map of string * references stored in the databasee. 
* @param inPlace If inPlace is true then the given BibtexEntries will be modified, if false then copies of the BibtexEntries are made before resolving the strings. * @return a list of bibtexentries, with all strings resolved. It is dependent on the value of inPlace whether copies are made or the given BibtexEntries are modified. */ public List<BibtexEntry> resolveForStrings(Collection<BibtexEntry> entries, boolean inPlace) { if (entries == null) { throw new NullPointerException(); } List<BibtexEntry> results = new ArrayList<BibtexEntry>(entries.size()); for (BibtexEntry entry : entries) { results.add(this.resolveForStrings(entry, inPlace)); } return results; } /** * Take the given BibtexEntry and resolve any string references. * * @param entry A BibtexEntry in which all strings of the form #xxx# will be * resolved against the hash map of string references stored in * the databasee. * @param inPlace If inPlace is true then the given BibtexEntry will be * modified, if false then a copy is made using close made before * resolving the strings. * @return a BibtexEntry with all string references resolved. It is * dependent on the value of inPlace whether a copy is made or the * given BibtexEntries is modified. */ public BibtexEntry resolveForStrings(BibtexEntry entry, boolean inPlace) { if (!inPlace) { entry = (BibtexEntry) entry.clone(); } for (Object field : entry.getAllFields()) { entry.setField(field.toString(), this.resolveForStrings(entry.getField(field.toString()))); } return entry; } /** * If the label represents a string contained in this database, returns * that string's content. Resolves references to other strings, taking * care not to follow a circular reference pattern. * If the string is undefined, returns null. 
*/ private String resolveString(String label, HashSet<String> usedIds) { for (BibtexString string : bibtexStrings.values()) { //Util.pr(label+" : "+string.getName()); if (string.getName().toLowerCase().equals(label.toLowerCase())) { // First check if this string label has been resolved // earlier in this recursion. If so, we have a // circular reference, and have to stop to avoid // infinite recursion. if (usedIds.contains(string.getId())) { LOGGER.info("Stopped due to circular reference in strings: " + label); return label; } // If not, log this string's ID now. usedIds.add(string.getId()); // Ok, we found the string. Now we must make sure we // resolve any references to other strings in this one. String res = string.getContent(); res = resolveContent(res, usedIds); // Finished with recursing this branch, so we remove our // ID again: usedIds.remove(string.getId()); return res; } } // If we get to this point, the string has obviously not been defined locally. // Check if one of the standard BibTeX month strings has been used: MonthUtil.Month month = MonthUtil.getMonthByShortName(label); if (month.isValid()) { return month.fullName; } else { return null; } } private String resolveContent(String res, HashSet<String> usedIds) { //if (res.matches(".*#[-\\^\\:\\w]+#.*")) { if (res.matches(".*#[^#]+#.*")) { StringBuilder newRes = new StringBuilder(); int piv = 0; int next; while ((next = res.indexOf("#", piv)) >= 0) { // We found the next string ref. Append the text // up to it. if (next > 0) { newRes.append(res.substring(piv, next)); } int stringEnd = res.indexOf("#", next + 1); if (stringEnd >= 0) { // We found the boundaries of the string ref, // now resolve that one. String refLabel = res.substring(next + 1, stringEnd); String resolved = resolveString(refLabel, usedIds); if (resolved == null) { // Could not resolve string. 
Display the # // characters rather than removing them: newRes.append(res.substring(next, stringEnd + 1)); } else { // The string was resolved, so we display its meaning only, // stripping the # characters signifying the string label: newRes.append(resolved); } piv = stringEnd + 1; } else { // We didn't find the boundaries of the string ref. This // makes it impossible to interpret it as a string label. // So we should just append the rest of the text and finish. newRes.append(res.substring(next)); piv = res.length(); break; } } if (piv < res.length() - 1) { newRes.append(res.substring(piv)); } res = newRes.toString(); } return res; } //########################################## // usage: // isDuplicate=checkForDuplicateKeyAndAdd( null, b.getKey() , issueDuplicateWarning); //############################################ // if the newkey already exists and is not the same as oldkey it will give a warning // else it will add the newkey to the to set and remove the oldkey private boolean checkForDuplicateKeyAndAdd(String oldKey, String newKey) { // Globals.logger(" checkForDuplicateKeyAndAdd [oldKey = " + oldKey + "] [newKey = " + newKey + "]"); boolean duplicate; if (oldKey == null) {// this is a new entry so don't bother removing oldKey duplicate = addKeyToSet(newKey); } else { if (oldKey.equals(newKey)) {// were OK because the user did not change keys duplicate = false; } else {// user changed the key // removed the oldkey // But what if more than two have the same key? // this means that user can add another key and would not get a warning! // consider this: i add a key xxx, then i add another key xxx . I get a warning. I delete the key xxx. JBM // removes this key from the allKey. then I add another key xxx. I don't get a warning! 
// i need a way to count the number of keys of each type // hashmap=>int (increment each time) removeKeyFromSet(oldKey); duplicate = addKeyToSet(newKey); } } if (duplicate) { LOGGER.warn(Localization.lang("Warning there is a duplicate key") + ':' + newKey); } return duplicate; } /** * Returns the number of occurences of the given key in this database. */ public int getNumberOfKeyOccurences(String key) { Object o = allKeys.get(key); if (o == null) { return 0; } else { return (Integer) o; } } //======================================================== // keep track of all the keys to warn if there are duplicates //======================================================== private boolean addKeyToSet(String key) { boolean exists = false; if (key == null || key.isEmpty()) { return false;//don't put empty key } if (allKeys.containsKey(key)) { // warning exists = true; allKeys.put(key, allKeys.get(key) + 1);// incrementInteger( allKeys.get(key))); } else { allKeys.put(key, 1); } return exists; } //======================================================== // reduce the number of keys by 1. 
if this number goes to zero then remove from the set // note: there is a good reason why we should not use a hashset but use hashmap instead //======================================================== private void removeKeyFromSet(String key) { if (key == null || key.isEmpty()) { return; } if (allKeys.containsKey(key)) { Integer tI = allKeys.get(key); // if(allKeys.get(key) instanceof Integer) if (tI == 1) { allKeys.remove(key); } else { allKeys.put(key, tI - 1);//decrementInteger( tI )); } } } private void fireDatabaseChanged(DatabaseChangeEvent e) { for (DatabaseChangeListener listener : changeListeners) { listener.databaseChanged(e); } } public void addDatabaseChangeListener(DatabaseChangeListener l) { changeListeners.add(l); } public void removeDatabaseChangeListener(DatabaseChangeListener l) { changeListeners.remove(l); } /** * Returns the text stored in the given field of the given bibtex entry * which belongs to the given database. * <p> * If a database is given, this function will try to resolve any string * references in the field-value. * Also, if a database is given, this function will try to find values for * unset fields in the entry linked by the "crossref" field, if any. * * @param field The field to return the value of. * @param bibtex maybenull * The bibtex entry which contains the field. * @param database maybenull * The database of the bibtex entry. * @return The resolved field value or null if not found. */ public static String getResolvedField(String field, BibtexEntry bibtex, BibtexDatabase database) { if (field.equals("bibtextype")) { return bibtex.getType().getName(); } // TODO: Changed this to also consider alias fields, which is the expected // behavior for the preview layout and for the check whatever all fields are present. // But there might be unwanted side-effects?! 
Object o = bibtex.getFieldOrAlias(field); // If this field is not set, and the entry has a crossref, try to look up the // field in the referred entry: Do not do this for the bibtex key. if (o == null && database != null && database.followCrossrefs && !field.equals(BibtexFields.KEY_FIELD)) { Object crossRef = bibtex.getField("crossref"); if (crossRef != null) { BibtexEntry referred = database.getEntryByKey((String) crossRef); if (referred != null) { // Ok, we found the referred entry. Get the field value from that // entry. If it is unset there, too, stop looking: o = referred.getField(field); } } } return BibtexDatabase.getText((String) o, database); } /** * Returns a text with references resolved according to an optionally given * database. * * @param toResolve maybenull The text to resolve. * @param database maybenull The database to use for resolving the text. * @return The resolved text or the original text if either the text or the database are null */ public static String getText(String toResolve, BibtexDatabase database) { if (toResolve != null && database != null) { return database.resolveForStrings(toResolve); } return toResolve; } public void setFollowCrossrefs(boolean followCrossrefs) { this.followCrossrefs = followCrossrefs; } }
Remove unnecessary synchronization
src/main/java/net/sf/jabref/model/BibtexDatabase.java
Remove unnecessary synchronization
Java
mit
a870794540ebf38a7a752ff91b16ba58f0891f80
0
analogweb/guice-plugin
package org.analogweb.guice; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.analogweb.ContainerAdaptor; import org.analogweb.util.Assertion; import org.analogweb.util.logging.Log; import org.analogweb.util.logging.Logs; import com.google.inject.Binding; import com.google.inject.Injector; import com.google.inject.TypeLiteral; /** * {@link Injector}が管理するオブジェクトのインスタンスを取得する * {@link ContainerAdaptor}の実装です。 * @author snowgoose */ public class GuiceContainerAdaptor implements ContainerAdaptor { private static final Log log = Logs.getLog(GuiceContainerAdaptor.class); private final Injector injector; public GuiceContainerAdaptor(Injector injector) { Assertion.notNull(injector, Injector.class.getName()); this.injector = injector; } @Override public <T> T getInstanceOfType(Class<T> type) { Injector injector = getInjector(); List<Binding<T>> bindings = findBindings(type, injector); if (bindings.isEmpty()) { return null; } T instance = injector.getInstance(bindings.get(0).getKey()); log.log(GuicePluginModulesConfig.PLUGIN_MESSAGE_RESOURCE, "DGB000001", type, instance, String.valueOf(injector.hashCode())); return instance; } @Override public <T> List<T> getInstancesOfType(Class<T> type) { Injector injector = getInjector(); List<Binding<T>> findBindingsByType = findBindings(type, injector); List<T> typeInstances = new ArrayList<T>(); for (Binding<T> binding : findBindingsByType) { T instance = injector.getInstance(binding.getKey()); typeInstances.add(instance); } log.log(GuicePluginModulesConfig.PLUGIN_MESSAGE_RESOURCE, "DGB000001", type, typeInstances, injector); return Collections.unmodifiableList(typeInstances); } private <T> List<Binding<T>> findBindings(Class<T> type, Injector injector) { TypeLiteral<T> typeLiteral = TypeLiteral.get(type); return injector.findBindingsByType(typeLiteral); } protected Injector getInjector() { return this.injector; } }
src/main/java/org/analogweb/guice/GuiceContainerAdaptor.java
package org.analogweb.guice; import java.util.ArrayList; import java.util.List; import org.analogweb.ContainerAdaptor; import org.analogweb.util.Assertion; import org.analogweb.util.logging.Log; import org.analogweb.util.logging.Logs; import com.google.inject.Binding; import com.google.inject.Injector; import com.google.inject.TypeLiteral; /** * {@link Injector}が管理するオブジェクトのインスタンスを取得する * {@link ContainerAdaptor}の実装です。 * @author snowgoose */ public class GuiceContainerAdaptor implements ContainerAdaptor { private static final Log log = Logs.getLog(GuiceContainerAdaptor.class); private final Injector injector; public GuiceContainerAdaptor(Injector injector) { Assertion.notNull(injector, Injector.class.getName()); this.injector = injector; } @Override public <T> T getInstanceOfType(Class<T> type) { Injector injector = getInjector(); List<Binding<T>> bindings = findBindings(type, injector); if (bindings.isEmpty()) { return null; } T instance = injector.getInstance(bindings.get(0).getKey()); log.log(GuicePluginModulesConfig.PLUGIN_MESSAGE_RESOURCE, "DGB000001", type, instance, String.valueOf(injector.hashCode())); return instance; } @Override public <T> List<T> getInstancesOfType(Class<T> type) { Injector injector = getInjector(); List<Binding<T>> findBindingsByType = findBindings(type, injector); List<T> typeInstances = new ArrayList<T>(); for (Binding<T> binding : findBindingsByType) { T instance = injector.getInstance(binding.getKey()); typeInstances.add(instance); } log.log(GuicePluginModulesConfig.PLUGIN_MESSAGE_RESOURCE, "DGB000001", type, typeInstances, injector); return typeInstances; } private <T> List<Binding<T>> findBindings(Class<T> type, Injector injector) { TypeLiteral<T> typeLiteral = TypeLiteral.get(type); return injector.findBindingsByType(typeLiteral); } protected Injector getInjector() { return this.injector; } }
Returns unmodifiable instances list
src/main/java/org/analogweb/guice/GuiceContainerAdaptor.java
Returns unmodifiable instances list
Java
mit
9fde75c45a8217bcc3a3f3198127ea2a70e4011a
0
nwalters512/the-blue-alliance-android,1fish2/the-blue-alliance-android,1fish2/the-blue-alliance-android,nwalters512/the-blue-alliance-android,phil-lopreiato/the-blue-alliance-android,1fish2/the-blue-alliance-android,the-blue-alliance/the-blue-alliance-android,phil-lopreiato/the-blue-alliance-android,the-blue-alliance/the-blue-alliance-android,the-blue-alliance/the-blue-alliance-android,phil-lopreiato/the-blue-alliance-android,nwalters512/the-blue-alliance-android
package com.thebluealliance.androidclient.subscribers; import com.thebluealliance.androidclient.R; import com.thebluealliance.androidclient.listitems.ListGroup; import com.thebluealliance.androidclient.models.Media; import com.thebluealliance.androidclient.types.MediaType; import android.content.res.Resources; import java.util.ArrayList; import java.util.List; public class MediaListSubscriber extends BaseAPISubscriber<List<Media>, List<ListGroup>> { private ListGroup mPhotos, mVideos; public MediaListSubscriber(Resources resources) { super(); mPhotos = new ListGroup(resources.getString(R.string.media_images_header)); mVideos = new ListGroup(resources.getString(R.string.media_videos_header)); mDataToBind = new ArrayList<>(); } @Override public void parseData() { mDataToBind.clear(); mPhotos.clear(); mVideos.clear(); for (int i=0; i < mAPIData.size(); i++) { Media media = mAPIData.get(i); MediaType mediaType = MediaType.fromString(media.getType()); if (mediaType.isImage()) { mPhotos.children.add(media); } else if (mediaType.isVideo()) { mVideos.children.add(media); } } if (!mPhotos.children.isEmpty()) { mDataToBind.add(mPhotos); } if (!mVideos.children.isEmpty()) { mDataToBind.add(mVideos); } } @Override public boolean isDataValid() { return super.isDataValid() && !mAPIData.isEmpty(); } }
android/src/main/java/com/thebluealliance/androidclient/subscribers/MediaListSubscriber.java
package com.thebluealliance.androidclient.subscribers; import com.thebluealliance.androidclient.R; import com.thebluealliance.androidclient.listitems.ListGroup; import com.thebluealliance.androidclient.models.Media; import com.thebluealliance.androidclient.types.MediaType; import android.content.res.Resources; import java.util.ArrayList; import java.util.List; public class MediaListSubscriber extends BaseAPISubscriber<List<Media>, List<ListGroup>> { private ListGroup mPhotos, mVideos; public MediaListSubscriber(Resources resources) { super(); mPhotos = new ListGroup(resources.getString(R.string.media_images_header)); mVideos = new ListGroup(resources.getString(R.string.media_videos_header)); mDataToBind = new ArrayList<>(); } @Override public void parseData() { mDataToBind.clear(); mPhotos.clear(); mVideos.clear(); for (int i=0; i < mAPIData.size(); i++) { Media media = mAPIData.get(i); MediaType mediaType = MediaType.fromString(media.getType()); if (mediaType.isImage()) { mPhotos.children.add(media); } else if (mediaType.isVideo()) { mVideos.children.add(media); } } if (!mPhotos.children.isEmpty()) { mDataToBind.add(mPhotos); } if (!mVideos.children.isEmpty()) { mDataToBind.add(mVideos); } } }
Validate media lists
android/src/main/java/com/thebluealliance/androidclient/subscribers/MediaListSubscriber.java
Validate media lists
Java
mit
f96ede7fa9fbc274f43ff9c822400dcccc030a13
0
jspspike/FTC6299
package org.firstinspires.ftc.quadx.Libraries; import android.util.Log; import com.qualcomm.hardware.adafruit.BNO055IMU; import com.qualcomm.hardware.adafruit.JustLoggingAccelerationIntegrator; import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cRangeSensor; import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; import com.qualcomm.robotcore.hardware.ColorSensor; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.I2cAddr; import com.qualcomm.robotcore.hardware.Servo; import com.qualcomm.robotcore.util.ElapsedTime; import org.firstinspires.ftc.robotcore.external.navigation.Orientation; /** * Created by jspspike on 1/15/2016. */ public abstract class MyOpMode extends LinearOpMode { public static final int MOVEMENT_DELAY = 500; public boolean flyWheelRunning = true; public static DcMotor motorBL; public static DcMotor motorBR; public static DcMotor motorFL; public static DcMotor motorFR; public static DcMotor manip; public static DcMotor flywheel; public static Servo buttonPusher; public static ColorSensor floorL; public static ColorSensor floorR; public static ColorSensor beaconL; public static ColorSensor beaconR; public static BNO055IMU gyro; public static BNO055IMU.Parameters gyroParam; private static ModernRoboticsI2cRangeSensor ultra; public int gray; public double ultraDistance; public void hardwareMap() { motorBL = hardwareMap.dcMotor.get("motorBL"); motorBR = hardwareMap.dcMotor.get("motorBR"); motorFL = hardwareMap.dcMotor.get("motorFL"); motorFR = hardwareMap.dcMotor.get("motorFR"); floorL = hardwareMap.colorSensor.get("floorL"); floorR = hardwareMap.colorSensor.get("floorR"); beaconL = hardwareMap.colorSensor.get("beaconL"); beaconR = hardwareMap.colorSensor.get("beaconR"); gyro = hardwareMap.get(BNO055IMU.class, "gyro"); ultra = hardwareMap.get(ModernRoboticsI2cRangeSensor.class, "ultra"); manip = hardwareMap.dcMotor.get("manip"); flywheel = hardwareMap.dcMotor.get("fly"); buttonPusher = 
hardwareMap.servo.get("buttonP"); telemetry.addData("Status", "Hardware Mapped"); telemetry.update(); } public void hardwareMapTroll() { motorBL = hardwareMap.dcMotor.get("motorBL"); motorBR = hardwareMap.dcMotor.get("motorBR"); motorFL = hardwareMap.dcMotor.get("motorFL"); motorFR = hardwareMap.dcMotor.get("motorFR"); floorL = hardwareMap.colorSensor.get("floorL"); floorR = hardwareMap.colorSensor.get("floorR"); beaconL = hardwareMap.colorSensor.get("beaconL"); beaconR = hardwareMap.colorSensor.get("beaconR"); gyro = hardwareMap.get(BNO055IMU.class, "gyro"); ultra = hardwareMap.get(ModernRoboticsI2cRangeSensor.class, "ultra"); telemetry.addData("Status", "Hardware Mapped"); telemetry.update(); } public void initSensors() { telemetry.addData("Sensors", "Initializing..."); telemetry.update(); floorL.setI2cAddress(I2cAddr.create8bit(0x20)); floorR.setI2cAddress(I2cAddr.create8bit(0x2a)); beaconL.setI2cAddress(I2cAddr.create8bit(0x2c)); beaconR.setI2cAddress(I2cAddr.create8bit(0x2e)); floorL.enableLed(true); floorR.enableLed(true); beaconL.enableLed(false); beaconR.enableLed(false); gray = ( floorL.alpha() + floorR.alpha() ) / 2; ultraDistance = -1; gyroParam = new BNO055IMU.Parameters(); gyroParam.angleUnit = BNO055IMU.AngleUnit.DEGREES; gyroParam.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC; gyroParam.calibrationDataFile = "AdafruitIMUCalibration.json"; gyroParam.loggingEnabled = true; gyroParam.loggingTag = "Gryo"; gyroParam.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator(); gyro.initialize(gyroParam); telemetry.addData("Sensors", "Initialized"); telemetry.update(); } public void initServos() { buttonPusher.setPosition(.5); } public void delay(long milliseconds) { try { Thread.sleep(milliseconds); } catch (Exception e) { } } public void setMotors(double left, double right) { motorFL.setPower(left); motorBL.setPower(left); motorFR.setPower(-right); motorBR.setPower(-right); } public void stopMotors() { motorFL.setPower(0); 
motorBL.setPower(0); motorFR.setPower(0); motorBR.setPower(0); } public void resetEncoders() { motorFL.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); motorBL.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); motorFR.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); motorBR.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); motorFL.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER); motorBL.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER); motorFR.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER); motorBR.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER); } public int getEncoderAverage() { int encoders = 0; int value = 0; if (Math.abs(motorFL.getCurrentPosition()) > 2) { value += Math.abs(motorFL.getCurrentPosition()); encoders++; } if (Math.abs(motorFR.getCurrentPosition()) > 2) { value += Math.abs(motorFR.getCurrentPosition()); encoders++; } if (Math.abs(motorBL.getCurrentPosition()) > 2) { value += Math.abs(motorBL.getCurrentPosition()); encoders++; } if (Math.abs(motorBR.getCurrentPosition()) > 2) { value += Math.abs(motorBR.getCurrentPosition()); encoders++; } return encoders == 0 ? 
0 : value / encoders; } public void resetGyro() { gyro.initialize(gyroParam); } public double getGyroYaw() { Orientation angles = gyro.getAngularOrientation(); if (Math.abs(angles.firstAngle) >= 180) return Math.abs(angles.firstAngle) - 360; return Math.abs(angles.firstAngle); } public double getGryoPitch() { Orientation angles = gyro.getAngularOrientation(); return angles.secondAngle; } public double getGyroRoll() { Orientation angles = gyro.getAngularOrientation(); return angles.thirdAngle; } public double getUltraDistance() { double value = ultra.cmUltrasonic(); if (value != 255) ultraDistance = value; return ultraDistance; } public void setServoSlow(Servo servo, double pos) throws InterruptedException { double currentPosition = servo.getPosition(); if (currentPosition - pos > 0) { for (; currentPosition > pos; currentPosition -= .005) { servo.setPosition(currentPosition); delay(1); idle(); } } else for (; currentPosition < pos; currentPosition += .005) { servo.setPosition(currentPosition); delay(1); idle(); } } public void turnPID(double pow, double deg) throws InterruptedException {turnPID(pow, deg, 5000);} public void turnPID(double pow, double deg, int tim) throws InterruptedException { if (!opModeIsActive()) return; delay(MOVEMENT_DELAY); resetGyro(); double inte = 0; double power; double der; double error; double previousError = deg - getGyroYaw(); ElapsedTime time = new ElapsedTime(); time.reset(); resetStartTime(); do { error = deg - getGyroYaw(); power = pow * error * .0222; inte = inte + (getRuntime() * error * .02); der = (error - previousError) / getRuntime() * .02; power += inte + der; if (power > 1) power = 1; else if (power < -1) { power = -1; } setMotors(power, -power); resetStartTime(); previousError = error; idle(); } while (Math.abs(power) > .15 && time.milliseconds() < tim); stopMotors(); } public void turn(double pow, double deg) throws InterruptedException {turn(pow, deg, 15000);} public void turn(double pow, double deg, int tim) throws 
InterruptedException { if (!opModeIsActive()) return; resetGyro(); delay(MOVEMENT_DELAY); ElapsedTime time = new ElapsedTime(); time.reset(); if (deg > 0) { while (deg > getGyroYaw() && time.milliseconds() < tim) { setMotors(pow, -pow); idle(); } } else { while (deg < getGyroYaw() && time.milliseconds() < tim) { setMotors(-pow, pow); idle(); } } stopMotors(); } public void moveToRange(double pow, double deg, int cm) throws InterruptedException {moveToRange(pow, deg, cm, 1.5);} public void moveToRange(double pow, double deg, int cm, double threshold) throws InterruptedException {moveToRange(pow, deg, cm, threshold, 4.0);} public void moveToRange(double pow, double deg, int cm, double threshold, double red) throws InterruptedException { moveToRange(pow, deg, cm, threshold, red, 15000);} public void moveToRange(double pow, double deg, int cm, double threshold, double red, int tim) throws InterruptedException { if (!opModeIsActive()) return; ElapsedTime time = new ElapsedTime(); resetGyro(); resetEncoders(); delay(MOVEMENT_DELAY); time.reset(); if (deg > 0) { while(deg > getEncoderAverage() && time.milliseconds() < tim) { if (getUltraDistance() < cm) setMotors(pow / (red * .5), pow); else if (getUltraDistance() > cm) setMotors(pow, pow / (red * .5)); else { if (getGyroYaw() > threshold) setMotors(pow / red, pow); else if (getGyroYaw() < -threshold) setMotors(pow, pow / red); else setMotors(pow, pow); } telemetry.addData("Gryo", getGyroYaw()); telemetry.addData("Ultra", getUltraDistance()); telemetry.update(); idle(); } } else { while(Math.abs(deg) > getEncoderAverage() && time.milliseconds() < tim) { if (getUltraDistance() < cm) setMotors(pow / (red * 1.5), pow); else if (getUltraDistance() > cm) setMotors(pow, pow / (red * 1.5)); else { if (getGyroYaw() > threshold) setMotors(pow , pow / red); else if (getGyroYaw() < -threshold) setMotors(pow / red, pow); else setMotors(pow, pow); } telemetry.addData("Gryo", getGyroYaw()); telemetry.addData("Ultra", 
getUltraDistance()); telemetry.update(); idle(); } } stopMotors(); } public void moveTo(double pow, double deg) throws InterruptedException {moveTo(pow, deg, 1.5);} public void moveTo(double pow, double deg, double threshold) throws InterruptedException {moveTo(pow, deg, threshold, 4.0);} public void moveTo(double pow, double deg, double threshold, double red) throws InterruptedException { moveTo(pow, deg, threshold, red, 15000);} public void moveTo(double pow, double deg, double threshold, double red, int tim) throws InterruptedException { if (!opModeIsActive()) return; ElapsedTime time = new ElapsedTime(); resetGyro(); resetEncoders(); delay(MOVEMENT_DELAY); time.reset(); if (deg > 0) { while(deg > getEncoderAverage() && time.milliseconds() < tim) { if (getGyroYaw() > threshold) setMotors(pow / red, pow); else if (getGyroYaw() < -threshold) setMotors(pow, pow / red); else setMotors(pow, pow); idle(); } } else { while(Math.abs(deg) > getEncoderAverage() && time.milliseconds() < tim) { if (getGyroYaw() > threshold) setMotors(-pow , -pow / red); else if (getGyroYaw() < -threshold) setMotors(-pow / red, -pow); else setMotors(pow, pow); idle(); } } stopMotors(); } public void turnCorr(double pow, double deg) throws InterruptedException {turnCorr(pow, deg, 8000);} public void turnCorr(double pow, double deg, int tim) throws InterruptedException { if (!opModeIsActive()) return; double newPow; ElapsedTime time = new ElapsedTime(); resetGyro(); delay(MOVEMENT_DELAY); time.reset(); if (deg > 0) { while(deg > getGyroYaw() && time.milliseconds() < tim) { newPow = pow * (Math.abs(deg - getGyroYaw()) / 80); if (newPow < .2) newPow = .2; setMotors(newPow, -newPow); idle(); } } else { while(deg < getGyroYaw() && time.milliseconds() < tim) { newPow = pow * (Math.abs(deg - getGyroYaw()) /80); if (newPow < .2) newPow = .2; setMotors(-newPow, newPow); idle(); } } stopMotors(); if (getGyroYaw() > deg) { while (deg < getGyroYaw() && opModeIsActive()) { setMotors(-pow / 3, pow / 3); 
idle(); } } else { while (deg > getGyroYaw() && opModeIsActive()) { setMotors(pow / 3, -pow / 3); idle(); } } stopMotors(); } public void arcTurnCorr(double pow, double deg) throws InterruptedException {arcTurnCorr(pow, deg, 6000);} public void arcTurnCorr(double pow, double deg, int tim) throws InterruptedException { if (!opModeIsActive()) return; double newPow; ElapsedTime time = new ElapsedTime(); resetGyro(); delay(MOVEMENT_DELAY); time.reset(); if (deg > 0) { while(deg > getGyroYaw() && time.milliseconds() < tim) { newPow = pow * (Math.abs(deg - getGyroYaw()) / 80); if (newPow < .2) newPow = .2; setMotors(newPow, 0); idle(); } } else { while(deg < getGyroYaw() && time.milliseconds() < tim) { newPow = pow * (Math.abs(deg - getGyroYaw()) /80); if (newPow < .2) newPow = .2; setMotors(0, newPow); idle(); } } stopMotors(); if (getGyroYaw() > deg) { while (deg < getGyroYaw() && opModeIsActive()) { setMotors(-pow / 3, pow / 3); idle(); } } else { while (deg > getGyroYaw() && opModeIsActive()) { setMotors(pow / 3, -pow / 3); idle(); } } stopMotors(); } public void untilWhiteRange(double pow, double cm) throws InterruptedException {untilWhiteRange(pow, cm, 1.5, 4, 7000);} public void untilWhiteRange(double pow, double cm, double threshold, double red, int tim) throws InterruptedException { if (!opModeIsActive()) return; resetEncoders(); resetGyro(); delay(1000); ElapsedTime time = new ElapsedTime(); time.reset(); if (pow > 0) { while (floorL.alpha() < gray + 25 && time.milliseconds() < tim) { if (getUltraDistance() < cm) setMotors(pow / red, pow); else if (getUltraDistance() > cm) setMotors(pow, pow / red); else { if (getGyroYaw() > threshold) setMotors(pow / red, pow); else if (getGyroYaw() < -threshold) setMotors(pow, pow / red); else setMotors(pow, pow); } telemetry.addData("Gryo", getGyroYaw()); telemetry.addData("Ultra", getUltraDistance()); telemetry.addData("Color", floorL.alpha()); telemetry.update(); idle(); } } else { while (floorL.alpha() < gray + 25 && 
time.milliseconds() < tim) { if (getUltraDistance() < cm) setMotors(pow / red, pow); else if (getUltraDistance() > cm) setMotors(pow, pow / red); else { if (getGyroYaw() > threshold) setMotors(pow , pow / red); else if (getGyroYaw() < -threshold) setMotors(pow / red, pow); else setMotors(pow, pow); } telemetry.addData("Gryo", getGyroYaw()); telemetry.addData("Ultra", getUltraDistance()); telemetry.addData("Color", floorL.alpha()); telemetry.update(); idle(); } } stopMotors(); } public void untilWhite(double pow) throws InterruptedException {untilWhite(pow, 1.5, 4, 7000);} public void untilWhite(double pow, double threshold, double reduction, int tim) throws InterruptedException { if (!opModeIsActive()) return; resetEncoders(); resetGyro(); delay(1000); ElapsedTime time = new ElapsedTime(); time.reset(); if (pow > 0) { while (floorL.alpha() < gray + 25 && time.milliseconds() < tim) { if (getGyroYaw() > threshold) setMotors(pow / reduction, pow); else if (getGyroYaw() < -threshold) setMotors(pow, pow / reduction); else setMotors(pow, pow); idle(); } } else { while (floorL.alpha() < gray + 25 && time.milliseconds() < tim) { if (getGyroYaw() > threshold) { setMotors(pow, pow / reduction); } else if (getGyroYaw() < -threshold) { setMotors(pow / reduction, pow); } else { setMotors(pow, pow); } idle(); } } stopMotors(); } public void whiteTurn(double pow, int turns) { int count = 0; while (count < turns) { while(floorR.alpha() < gray + 25) { setMotors(0, pow); } count++; if (floorL.alpha() > gray + 25 && floorR.alpha() > gray + 25) break; while (floorL.alpha() < gray + 25) { setMotors(-pow, 0); } count++; if (floorL.alpha() > gray + 25 && floorR.alpha() > gray + 25) break; } } public void pressRed() { int redLeft = 0; redLeft += beaconL.red() - beaconR.red(); redLeft += beaconR.blue() - beaconL.blue(); if (redLeft > 0) { buttonPusher.setPosition(.75); delay(1000); } else { buttonPusher.setPosition(.25); delay(1000); } buttonPusher.setPosition(.5); } public void 
pressBlue() { int blueLeft = 0; blueLeft += beaconL.blue() - beaconR.blue(); blueLeft += beaconR.red() - beaconL.red(); if (blueLeft > 0) { buttonPusher.setPosition(.75); delay(1000); } else { buttonPusher.setPosition(.25); delay(1000); } buttonPusher.setPosition(.5); } public void flyWheel(final double desiredSpeed) { Runnable flyLoop = new Runnable() { @Override public void run() { delay(300); int prevEncoderVal; double pow = .65; flywheel.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); flywheel.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER); prevEncoderVal = flywheel.getCurrentPosition(); double speed; double error; while (flyWheelRunning && opModeIsActive()) { resetStartTime(); flywheel.setPower(pow); try { idle(); } catch (Exception e) { e.printStackTrace(); } delay(100); speed = (flywheel.getCurrentPosition() - prevEncoderVal) / getRuntime(); prevEncoderVal = flywheel.getCurrentPosition(); error = desiredSpeed - speed; pow += error * .002; } } }; } }
TeamCode/src/main/java/org/firstinspires/ftc/quadx/Libraries/MyOpMode.java
package org.firstinspires.ftc.quadx.Libraries;

import android.util.Log;
import com.qualcomm.hardware.adafruit.BNO055IMU;
import com.qualcomm.hardware.adafruit.JustLoggingAccelerationIntegrator;
import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cRangeSensor;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.I2cAddr;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.ElapsedTime;

import org.firstinspires.ftc.robotcore.external.navigation.Orientation;

/**
 * Created by jspspike on 1/15/2016.
 *
 * Shared base class for this team's autonomous/teleop programs. It owns the
 * hardware references (drive motors, manipulator, flywheel, button-pusher servo,
 * color sensors, IMU, ultrasonic range sensor) and provides gyro/encoder/range
 * assisted driving primitives built on LinearOpMode.
 *
 * NOTE(review): all hardware fields are static, so state leaks between op-mode
 * runs; the FTC SDK re-maps hardware each run, which presumably makes this
 * safe in practice — confirm.
 */
public abstract class MyOpMode extends LinearOpMode {

    // Pause (ms) inserted before most movements so the robot settles and the
    // gyro reset takes effect.
    public static final int MOVEMENT_DELAY = 500;

    // Flag polled by the flywheel control loop; clear it to stop the loop.
    public boolean flyWheelRunning = true;

    public static DcMotor motorBL;
    public static DcMotor motorBR;
    public static DcMotor motorFL;
    public static DcMotor motorFR;
    public static DcMotor manip;
    public static DcMotor flywheel;

    public static Servo buttonPusher;

    public static ColorSensor floorL;
    public static ColorSensor floorR;
    public static ColorSensor beaconL;
    public static ColorSensor beaconR;

    public static BNO055IMU gyro;
    public static BNO055IMU.Parameters gyroParam;

    private static ModernRoboticsI2cRangeSensor ultra;

    // Baseline floor brightness sampled in initSensors(); white-line detection
    // triggers at gray + 25.
    public int gray;
    // Last valid ultrasonic reading (cm); 255 readings are treated as invalid
    // and the previous value is kept. -1 until the first valid reading.
    public double ultraDistance;

    /** Maps every device on the robot, including manipulator/flywheel/pusher. */
    public void hardwareMap() {
        motorBL = hardwareMap.dcMotor.get("motorBL");
        motorBR = hardwareMap.dcMotor.get("motorBR");
        motorFL = hardwareMap.dcMotor.get("motorFL");
        motorFR = hardwareMap.dcMotor.get("motorFR");
        floorL = hardwareMap.colorSensor.get("floorL");
        floorR = hardwareMap.colorSensor.get("floorR");
        beaconL = hardwareMap.colorSensor.get("beaconL");
        beaconR = hardwareMap.colorSensor.get("beaconR");
        gyro = hardwareMap.get(BNO055IMU.class, "gyro");
        ultra = hardwareMap.get(ModernRoboticsI2cRangeSensor.class, "ultra");
        manip = hardwareMap.dcMotor.get("manip");
        flywheel = hardwareMap.dcMotor.get("fly");
        buttonPusher = hardwareMap.servo.get("buttonP");
        telemetry.addData("Status", "Hardware Mapped");
        telemetry.update();
    }

    /** Maps drive/sensor hardware only (no manip, flywheel, or button pusher). */
    public void hardwareMapTroll() {
        motorBL = hardwareMap.dcMotor.get("motorBL");
        motorBR = hardwareMap.dcMotor.get("motorBR");
        motorFL = hardwareMap.dcMotor.get("motorFL");
        motorFR = hardwareMap.dcMotor.get("motorFR");
        floorL = hardwareMap.colorSensor.get("floorL");
        floorR = hardwareMap.colorSensor.get("floorR");
        beaconL = hardwareMap.colorSensor.get("beaconL");
        beaconR = hardwareMap.colorSensor.get("beaconR");
        gyro = hardwareMap.get(BNO055IMU.class, "gyro");
        ultra = hardwareMap.get(ModernRoboticsI2cRangeSensor.class, "ultra");
        telemetry.addData("Status", "Hardware Mapped");
        telemetry.update();
    }

    /**
     * Configures I2C addresses and LEDs for the color sensors, samples the
     * baseline floor brightness into {@code gray}, and initializes the IMU.
     */
    public void initSensors() {
        telemetry.addData("Sensors", "Initializing...");
        telemetry.update();
        // Each color sensor needs a distinct I2C address on the shared bus.
        floorL.setI2cAddress(I2cAddr.create8bit(0x20));
        floorR.setI2cAddress(I2cAddr.create8bit(0x2a));
        beaconL.setI2cAddress(I2cAddr.create8bit(0x2c));
        beaconR.setI2cAddress(I2cAddr.create8bit(0x2e));
        // Floor sensors use active illumination; beacon sensors read emitted light.
        floorL.enableLed(true);
        floorR.enableLed(true);
        beaconL.enableLed(false);
        beaconR.enableLed(false);
        gray = ( floorL.alpha() + floorR.alpha() ) / 2;
        ultraDistance = -1;
        gyroParam = new BNO055IMU.Parameters();
        gyroParam.angleUnit = BNO055IMU.AngleUnit.DEGREES;
        gyroParam.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
        gyroParam.calibrationDataFile = "AdafruitIMUCalibration.json";
        gyroParam.loggingEnabled = true;
        gyroParam.loggingTag = "Gryo";
        gyroParam.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();
        gyro.initialize(gyroParam);
        telemetry.addData("Sensors", "Initialized");
        telemetry.update();
    }

    /** Centers the button-pusher servo. */
    public void initServos() {
        buttonPusher.setPosition(.5);
    }

    /**
     * Sleeps for the given number of milliseconds.
     * NOTE(review): swallows InterruptedException without re-interrupting the
     * thread, so op-mode stop requests arriving mid-sleep are ignored here.
     */
    public void delay(long milliseconds) {
        try {
            Thread.sleep(milliseconds);
        } catch (Exception e) {
        }
    }

    /**
     * Drives the tank sides. The right side is negated — presumably the right
     * motors are mounted mirrored; confirm against robot wiring.
     */
    public void setMotors(double left, double right) {
        motorFL.setPower(left);
        motorBL.setPower(left);
        motorFR.setPower(-right);
        motorBR.setPower(-right);
    }

    /** Zeroes power on all four drive motors. */
    public void stopMotors() {
        motorFL.setPower(0);
        motorBL.setPower(0);
        motorFR.setPower(0);
        motorBR.setPower(0);
    }

    /** Zeroes all drive encoders, then returns motors to open-loop power mode. */
    public void resetEncoders() {
        motorFL.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        motorBL.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        motorFR.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        motorBR.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        motorFL.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
        motorBL.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
        motorFR.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
        motorBR.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
    }

    /**
     * Averages |position| over the drive encoders, skipping any encoder whose
     * reading is within ±2 ticks of zero (treated as disconnected/idle).
     * Returns 0 when no encoder reports movement.
     */
    public int getEncoderAverage() {
        int encoders = 0;
        int value = 0;
        if (Math.abs(motorFL.getCurrentPosition()) > 2) {
            value += Math.abs(motorFL.getCurrentPosition());
            encoders++;
        }
        if (Math.abs(motorFR.getCurrentPosition()) > 2) {
            value += Math.abs(motorFR.getCurrentPosition());
            encoders++;
        }
        if (Math.abs(motorBL.getCurrentPosition()) > 2) {
            value += Math.abs(motorBL.getCurrentPosition());
            encoders++;
        }
        if (Math.abs(motorBR.getCurrentPosition()) > 2) {
            value += Math.abs(motorBR.getCurrentPosition());
            encoders++;
        }
        return encoders == 0 ? 0 : value / encoders;
    }

    /** Re-initializes the IMU, which zeroes the reported heading. */
    public void resetGyro() {
        gyro.initialize(gyroParam);
    }

    /**
     * Returns the yaw folded into (-180, 180].
     * NOTE(review): uses Math.abs on the raw angle before folding, so the sign
     * of small headings is discarded — confirm this matches how callers use
     * positive/negative thresholds.
     */
    public double getGyroYaw() {
        Orientation angles = gyro.getAngularOrientation();
        if (Math.abs(angles.firstAngle) >= 180)
            return Math.abs(angles.firstAngle) - 360;
        return Math.abs(angles.firstAngle);
    }

    /** Returns the IMU's second Euler angle. (Name keeps the original "Gryo" typo.) */
    public double getGryoPitch() {
        Orientation angles = gyro.getAngularOrientation();
        return angles.secondAngle;
    }

    /** Returns the IMU's third Euler angle. */
    public double getGyroRoll() {
        Orientation angles = gyro.getAngularOrientation();
        return angles.thirdAngle;
    }

    /**
     * Reads the ultrasonic range (cm). A reading of 255 is the sensor's
     * "no echo" value and is discarded in favor of the last good reading.
     */
    public double getUltraDistance() {
        double value = ultra.cmUltrasonic();
        if (value != 255)
            ultraDistance = value;
        return ultraDistance;
    }

    /** Sweeps a servo to {@code pos} in .005 steps with a 1 ms pause per step. */
    public void setServoSlow(Servo servo, double pos) throws InterruptedException {
        double currentPosition = servo.getPosition();
        if (currentPosition - pos > 0) {
            for (; currentPosition > pos; currentPosition -= .005) {
                servo.setPosition(currentPosition);
                delay(1);
                idle();
            }
        } else
            for (; currentPosition < pos; currentPosition += .005) {
                servo.setPosition(currentPosition);
                delay(1);
                idle();
            }
    }

    /** PID turn with a 5 s default timeout. */
    public void turnPID(double pow, double deg) throws InterruptedException {turnPID(pow, deg, 5000);}

    /**
     * Turns in place toward {@code deg} using a PID loop on gyro error.
     * Exits when the commanded power decays below .15 or the timeout elapses.
     * Gains (.0222 / .02 / .02) are hand-tuned for this robot.
     */
    public void turnPID(double pow, double deg, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        delay(MOVEMENT_DELAY);
        resetGyro();
        double inte = 0;
        double power;
        double der;
        double error;
        double previousError = deg - getGyroYaw();
        ElapsedTime time = new ElapsedTime();
        time.reset();
        resetStartTime();
        do {
            error = deg - getGyroYaw();
            power = pow * error * .0222;             // proportional term
            inte = inte + (getRuntime() * error * .02);            // integral term
            der = (error - previousError) / getRuntime() * .02;    // derivative term
            power += inte + der;
            // Clamp to valid motor power.
            if (power > 1)
                power = 1;
            else if (power < -1) {
                power = -1;
            }
            setMotors(power, -power);
            resetStartTime();
            previousError = error;
            idle();
        } while (Math.abs(power) > .15 && time.milliseconds() < tim);
        stopMotors();
    }

    /** Bang-bang turn with a 15 s default timeout. */
    public void turn(double pow, double deg) throws InterruptedException {turn(pow, deg, 15000);}

    /** Turns in place at fixed power until the gyro passes {@code deg} or timeout. */
    public void turn(double pow, double deg, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        resetGyro();
        delay(MOVEMENT_DELAY);
        ElapsedTime time = new ElapsedTime();
        time.reset();
        if (deg > 0) {
            while (deg > getGyroYaw() && time.milliseconds() < tim) {
                setMotors(pow, -pow);
                idle();
            }
        } else {
            while (deg < getGyroYaw() && time.milliseconds() < tim) {
                setMotors(-pow, pow);
                idle();
            }
        }
        stopMotors();
    }

    public void moveToRange(double pow, double deg, int cm) throws InterruptedException {moveToRange(pow, deg, cm, 1.5);}

    public void moveToRange(double pow, double deg, int cm, double threshold) throws InterruptedException {moveToRange(pow, deg, cm, threshold, 4.0);}

    public void moveToRange(double pow, double deg, int cm, double threshold, double red) throws InterruptedException { moveToRange(pow, deg, cm, threshold, red, 15000);}

    /**
     * Drives {@code deg} encoder ticks while wall-following at {@code cm} with
     * the ultrasonic sensor; falls back to gyro correction when on-distance.
     * {@code red} is the power-reduction divisor for the slower side.
     * NOTE(review): forward uses red * .5 but reverse uses red * 1.5 for the
     * range correction — presumably tuned asymmetrically; confirm intentional.
     */
    public void moveToRange(double pow, double deg, int cm, double threshold, double red, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        ElapsedTime time = new ElapsedTime();
        resetGyro();
        resetEncoders();
        delay(MOVEMENT_DELAY);
        time.reset();
        if (deg > 0) {
            while(deg > getEncoderAverage() && time.milliseconds() < tim) {
                if (getUltraDistance() < cm) setMotors(pow / (red * .5), pow);
                else if (getUltraDistance() > cm) setMotors(pow, pow / (red * .5));
                else {
                    if (getGyroYaw() > threshold) setMotors(pow / red, pow);
                    else if (getGyroYaw() < -threshold) setMotors(pow, pow / red);
                    else setMotors(pow, pow);
                }
                telemetry.addData("Gryo", getGyroYaw());
                telemetry.addData("Ultra", getUltraDistance());
                telemetry.update();
                idle();
            }
        } else {
            while(Math.abs(deg) > getEncoderAverage() && time.milliseconds() < tim) {
                if (getUltraDistance() < cm) setMotors(pow / (red * 1.5), pow);
                else if (getUltraDistance() > cm) setMotors(pow, pow / (red * 1.5));
                else {
                    if (getGyroYaw() > threshold) setMotors(pow , pow / red);
                    else if (getGyroYaw() < -threshold) setMotors(pow / red, pow);
                    else setMotors(pow, pow);
                }
                telemetry.addData("Gryo", getGyroYaw());
                telemetry.addData("Ultra", getUltraDistance());
                telemetry.update();
                idle();
            }
        }
        stopMotors();
    }

    public void moveTo(double pow, double deg) throws InterruptedException {moveTo(pow, deg, 1.5);}

    public void moveTo(double pow, double deg, double threshold) throws InterruptedException {moveTo(pow, deg, threshold, 4.0);}

    public void moveTo(double pow, double deg, double threshold, double red) throws InterruptedException { moveTo(pow, deg, threshold, red, 15000);}

    /**
     * Drives straight for {@code deg} encoder ticks with gyro drift correction.
     * NOTE(review): in the reverse branch the on-course case calls
     * setMotors(pow, pow) rather than setMotors(-pow, -pow) — callers
     * presumably pass a negative pow for reverse; confirm.
     */
    public void moveTo(double pow, double deg, double threshold, double red, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        ElapsedTime time = new ElapsedTime();
        resetGyro();
        resetEncoders();
        delay(MOVEMENT_DELAY);
        time.reset();
        if (deg > 0) {
            while(deg > getEncoderAverage() && time.milliseconds() < tim) {
                if (getGyroYaw() > threshold) setMotors(pow / red, pow);
                else if (getGyroYaw() < -threshold) setMotors(pow, pow / red);
                else setMotors(pow, pow);
                idle();
            }
        } else {
            while(Math.abs(deg) > getEncoderAverage() && time.milliseconds() < tim) {
                if (getGyroYaw() > threshold) setMotors(-pow , -pow / red);
                else if (getGyroYaw() < -threshold) setMotors(-pow / red, -pow);
                else setMotors(pow, pow);
                idle();
            }
        }
        stopMotors();
    }

    public void turnCorr(double pow, double deg) throws InterruptedException {turnCorr(pow, deg, 8000);}

    /**
     * Turns in place with power proportional to remaining error (floored at .2),
     * then does a slow second pass at pow/3 to correct any overshoot.
     */
    public void turnCorr(double pow, double deg, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        double newPow;
        ElapsedTime time = new ElapsedTime();
        resetGyro();
        delay(MOVEMENT_DELAY);
        time.reset();
        if (deg > 0) {
            while(deg > getGyroYaw() && time.milliseconds() < tim) {
                newPow = pow * (Math.abs(deg - getGyroYaw()) / 80);
                if (newPow < .2)
                    newPow = .2;
                setMotors(newPow, -newPow);
                idle();
            }
        } else {
            while(deg < getGyroYaw() && time.milliseconds() < tim) {
                newPow = pow * (Math.abs(deg - getGyroYaw()) /80);
                if (newPow < .2)
                    newPow = .2;
                setMotors(-newPow, newPow);
                idle();
            }
        }
        stopMotors();
        // Overshoot correction pass (no timeout; runs while the op mode is active).
        if (getGyroYaw() > deg) {
            while (deg < getGyroYaw() && opModeIsActive()) {
                setMotors(-pow / 3, pow / 3);
                idle();
            }
        } else {
            while (deg > getGyroYaw() && opModeIsActive()) {
                setMotors(pow / 3, -pow / 3);
                idle();
            }
        }
        stopMotors();
    }

    public void arcTurnCorr(double pow, double deg) throws InterruptedException {arcTurnCorr(pow, deg, 6000);}

    /**
     * Arc turn: drives only one side of the drivetrain (the other held at 0) so
     * the robot pivots around a wheel, then corrects overshoot in place at pow/3.
     */
    public void arcTurnCorr(double pow, double deg, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        double newPow;
        ElapsedTime time = new ElapsedTime();
        resetGyro();
        delay(MOVEMENT_DELAY);
        time.reset();
        if (deg > 0) {
            while(deg > getGyroYaw() && time.milliseconds() < tim) {
                newPow = pow * (Math.abs(deg - getGyroYaw()) / 80);
                if (newPow < .2)
                    newPow = .2;
                setMotors(newPow, 0);
                idle();
            }
        } else {
            while(deg < getGyroYaw() && time.milliseconds() < tim) {
                newPow = pow * (Math.abs(deg - getGyroYaw()) /80);
                if (newPow < .2)
                    newPow = .2;
                setMotors(0, newPow);
                idle();
            }
        }
        stopMotors();
        // Overshoot correction pass.
        if (getGyroYaw() > deg) {
            while (deg < getGyroYaw() && opModeIsActive()) {
                setMotors(-pow / 3, pow / 3);
                idle();
            }
        } else {
            while (deg > getGyroYaw() && opModeIsActive()) {
                setMotors(pow / 3, -pow / 3);
                idle();
            }
        }
        stopMotors();
    }

    public void untilWhiteRange(double pow, double cm) throws InterruptedException {untilWhiteRange(pow, cm, 1.5, 4, 7000);}

    /**
     * Drives until the left floor sensor sees the white line (alpha above
     * gray + 25) while wall-following at {@code cm}; gyro-corrects when
     * on-distance. Stops on the line or after {@code tim} ms.
     */
    public void untilWhiteRange(double pow, double cm, double threshold, double red, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        resetEncoders();
        resetGyro();
        delay(1000);
        ElapsedTime time = new ElapsedTime();
        time.reset();
        if (pow > 0) {
            while (floorL.alpha() < gray + 25 && time.milliseconds() < tim) {
                if (getUltraDistance() < cm) setMotors(pow / red, pow);
                else if (getUltraDistance() > cm) setMotors(pow, pow / red);
                else {
                    if (getGyroYaw() > threshold) setMotors(pow / red, pow);
                    else if (getGyroYaw() < -threshold) setMotors(pow, pow / red);
                    else setMotors(pow, pow);
                }
                telemetry.addData("Gryo", getGyroYaw());
                telemetry.addData("Ultra", getUltraDistance());
                telemetry.addData("Color", floorL.alpha());
                telemetry.update();
                idle();
            }
        } else {
            while (floorL.alpha() < gray + 25 && time.milliseconds() < tim) {
                if (getUltraDistance() < cm) setMotors(pow / red, pow);
                else if (getUltraDistance() > cm) setMotors(pow, pow / red);
                else {
                    if (getGyroYaw() > threshold) setMotors(pow , pow / red);
                    else if (getGyroYaw() < -threshold) setMotors(pow / red, pow);
                    else setMotors(pow, pow);
                }
                telemetry.addData("Gryo", getGyroYaw());
                telemetry.addData("Ultra", getUltraDistance());
                telemetry.addData("Color", floorL.alpha());
                telemetry.update();
                idle();
            }
        }
        stopMotors();
    }

    public void untilWhite(double pow) throws InterruptedException {untilWhite(pow, 1.5, 4, 7000);}

    /**
     * Drives straight (gyro-corrected) until the left floor sensor sees the
     * white line or {@code tim} ms elapse. Correction sides are mirrored for
     * negative power (driving in reverse).
     */
    public void untilWhite(double pow, double threshold, double reduction, int tim) throws InterruptedException {
        if (!opModeIsActive()) return;
        resetEncoders();
        resetGyro();
        delay(1000);
        ElapsedTime time = new ElapsedTime();
        time.reset();
        if (pow > 0) {
            while (floorL.alpha() < gray + 25 && time.milliseconds() < tim) {
                if (getGyroYaw() > threshold) setMotors(pow / reduction, pow);
                else if (getGyroYaw() < -threshold) setMotors(pow, pow / reduction);
                else setMotors(pow, pow);
                idle();
            }
        } else {
            while (floorL.alpha() < gray + 25 && time.milliseconds() < tim) {
                if (getGyroYaw() > threshold) {
                    setMotors(pow, pow / reduction);
                } else if (getGyroYaw() < -threshold) {
                    setMotors(pow / reduction, pow);
                } else {
                    setMotors(pow, pow);
                }
                idle();
            }
        }
        stopMotors();
    }

    /**
     * Pushes the beacon button on whichever side reads more red. Left-vs-right
     * redness is scored from both sensors' red and blue channels; .75 presses
     * one side, .25 the other, then the servo re-centers at .5.
     */
    public void pressRed() {
        int redLeft = 0;
        redLeft += beaconL.red() - beaconR.red();
        redLeft += beaconR.blue() - beaconL.blue();
        if (redLeft > 0) {
            buttonPusher.setPosition(.75);
            delay(1000);
        } else {
            buttonPusher.setPosition(.25);
            delay(1000);
        }
        buttonPusher.setPosition(.5);
    }

    /** Mirror of pressRed(): pushes the side that reads more blue. */
    public void pressBlue() {
        int blueLeft = 0;
        blueLeft += beaconL.blue() - beaconR.blue();
        blueLeft += beaconR.red() - beaconL.red();
        if (blueLeft > 0) {
            buttonPusher.setPosition(.75);
            delay(1000);
        } else {
            buttonPusher.setPosition(.25);
            delay(1000);
        }
        buttonPusher.setPosition(.5);
    }

    /**
     * Builds a closed-loop flywheel speed controller: every 100 ms it measures
     * encoder ticks per unit runtime and nudges power by error * .002 toward
     * {@code desiredSpeed}, while flyWheelRunning is set.
     *
     * NOTE(review): flyLoop is constructed but never started on a Thread and
     * never returned, so as written this method has no effect — presumably a
     * {@code new Thread(flyLoop).start()} was intended; confirm.
     */
    public void flyWheel(final double desiredSpeed) {
        Runnable flyLoop = new Runnable() {
            @Override
            public void run() {
                delay(300);
                int prevEncoderVal;
                double pow = .65;
                flywheel.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
                flywheel.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
                prevEncoderVal = flywheel.getCurrentPosition();
                double speed;
                double error;
                while (flyWheelRunning && opModeIsActive()) {
                    resetStartTime();
                    flywheel.setPower(pow);
                    try {
                        idle();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    delay(100);
                    speed = (flywheel.getCurrentPosition() - prevEncoderVal) / getRuntime();
                    prevEncoderVal = flywheel.getCurrentPosition();
                    error = desiredSpeed - speed;
                    pow += error * .002;
                }
            }
        };
    }
}
Added color sensor based turn method
TeamCode/src/main/java/org/firstinspires/ftc/quadx/Libraries/MyOpMode.java
Added color sensor based turn method
Java
mit
be3281a6b028092b3fe46e3ec5c511f2398c8d7a
0
MarquisLP/WorldScribe,MarquisLP/World-Scribe
package com.averi.worldscribe.activities;

import android.Manifest;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.view.View;
import android.view.ViewGroup;

import com.averi.worldscribe.R;
import com.averi.worldscribe.utilities.ActivityUtilities;
import com.averi.worldscribe.utilities.AppPreferences;
import com.averi.worldscribe.utilities.ExternalReader;
import com.averi.worldscribe.utilities.ExternalWriter;

/**
 * Entry-point Activity that gates the app behind the WRITE_EXTERNAL_STORAGE
 * runtime permission. App files on external storage are generated only after
 * the permission is known to be held (pre-Marshmallow devices grant it at
 * install time), after which the user is routed to the appropriate next screen.
 */
public class PermissionActivity extends ThemedActivity {

    // Request code passed to requestPermissions() and matched in the callback.
    public static final int REQUEST_WRITE_EXTERNAL_STORAGE = 1;

    private SharedPreferences preferences = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        preferences = getSharedPreferences("com.averi.worldscribe", MODE_PRIVATE);

        // Only touch external storage once the permission is assured: either the
        // device predates runtime permissions, or the user already granted it.
        if ((!(deviceUsesRuntimePermissions())) || (writePermissionWasGranted())) {
            generateMissingAppDirectoryAndFiles();
            goToNextActivity();
        }
    }

    @Override
    protected int getLayoutResourceID() {
        return R.layout.activity_permission;
    }

    @Override
    protected ViewGroup getRootLayout() {
        return (ViewGroup) findViewById(R.id.linearScreen);
    }

    /** Runtime permissions exist on API levels above Lollipop MR1 (i.e. 23+). */
    private boolean deviceUsesRuntimePermissions() {
        return (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1);
    }

    /** Returns true if WRITE_EXTERNAL_STORAGE has already been granted. */
    private boolean writePermissionWasGranted() {
        int permissionCheck = ContextCompat.checkSelfPermission(this,
                Manifest.permission.WRITE_EXTERNAL_STORAGE);

        return (permissionCheck == PackageManager.PERMISSION_GRANTED);
    }

    /**
     * Click handler for the layout's permission button. Shows the system
     * permission dialog, or — if the user previously chose "Don't ask again" —
     * falls back to opening the app's settings page.
     */
    public void askForWritePermission(View view) {
        if (preferences.getBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED, true)) {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
                    REQUEST_WRITE_EXTERNAL_STORAGE);
        } else {
            goToAppSettings();
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String permissions[],
                                           int[] grantResults) {
        switch (requestCode) {
            case REQUEST_WRITE_EXTERNAL_STORAGE:
                if (grantResults.length > 0
                        && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                    // Permission is now held: safe to create app files on
                    // external storage before moving on.
                    enableWritePermissionPrompt();
                    generateMissingAppDirectoryAndFiles();
                    goToNextActivity();
                } else if (userDisabledAskingForWritePermission()) {
                    recordDisablingOfWritePermissionPrompt();
                }
        }
    }

    /** Re-enables the system permission dialog for future launches. */
    private void enableWritePermissionPrompt() {
        preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED,
                true).apply();
    }

    /** Remembers that the user chose "Don't ask again" on the permission dialog. */
    private void recordDisablingOfWritePermissionPrompt() {
        preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED,
                false).apply();
    }

    /** True when the system will no longer show the permission rationale/dialog. */
    private boolean userDisabledAskingForWritePermission() {
        return (!(shouldShowRequestPermissionRationale(
                Manifest.permission.WRITE_EXTERNAL_STORAGE)));
    }

    /** Opens this app's page in the system Settings so the user can grant manually. */
    private void goToAppSettings() {
        Intent intent = new Intent();
        intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
        Uri uri = Uri.fromParts("package", this.getPackageName(), null);
        intent.setData(uri);
        startActivity(intent);
    }

    /**
     * Generates the app directory and any necessary configuration files if they
     * are missing from the user's external storage.
     */
    private void generateMissingAppDirectoryAndFiles() {
        if (!(ExternalReader.appDirectoryExists())) {
            ExternalWriter.createAppDirectory();
        }
        if (!(ExternalReader.noMediaFileExists())) {
            ExternalWriter.createNoMediaFile();
        }
    }

    /**
     * Routes to the last opened World if it still exists; otherwise to World
     * creation (no Worlds yet) or the create-or-load chooser.
     */
    private void goToNextActivity() {
        String lastOpenedWorldName = preferences.getString(AppPreferences.LAST_OPENED_WORLD, "");

        if ((!(lastOpenedWorldName.isEmpty()))
                && (ExternalReader.worldAlreadyExists(lastOpenedWorldName))) {
            goToLastOpenedWorld(lastOpenedWorldName);
        } else {
            setLastOpenedWorldToNothing();

            if (ExternalReader.worldListIsEmpty()) {
                goToWorldCreation();
            } else {
                goToCreateOrLoadWorld();
            }
        }
    }

    /** Finishes this Activity and re-opens the given World. */
    private void goToLastOpenedWorld(String lastOpenedWorldName) {
        ActivityUtilities.goToWorld(this, lastOpenedWorldName);
        finish();
    }

    /** Clears the stored last-opened World name. */
    private void setLastOpenedWorldToNothing() {
        preferences.edit().putString(AppPreferences.LAST_OPENED_WORLD, "").apply();
    }

    /** Finishes this Activity and opens the World-creation screen. */
    private void goToWorldCreation() {
        Intent goToWorldCreationIntent = new Intent(this, CreateWorldActivity.class);
        startActivity(goToWorldCreationIntent);
        finish();
    }

    /** Finishes this Activity and opens the create-or-load chooser. */
    private void goToCreateOrLoadWorld() {
        Intent goToCreateOrLoadWorldIntent = new Intent(this, CreateOrLoadWorldActivity.class);
        startActivity(goToCreateOrLoadWorldIntent);
        finish();
    }
}
app/src/main/java/com/averi/worldscribe/activities/PermissionActivity.java
package com.averi.worldscribe.activities; import android.Manifest; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.Settings; import android.support.v4.app.ActivityCompat; import android.support.v4.content.ContextCompat; import android.view.View; import android.view.ViewGroup; import com.averi.worldscribe.R; import com.averi.worldscribe.utilities.ActivityUtilities; import com.averi.worldscribe.utilities.AppPreferences; import com.averi.worldscribe.utilities.ExternalReader; import com.averi.worldscribe.utilities.ExternalWriter; public class PermissionActivity extends ThemedActivity { public static final int REQUEST_WRITE_EXTERNAL_STORAGE = 1; private SharedPreferences preferences = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); preferences = getSharedPreferences("com.averi.worldscribe", MODE_PRIVATE); if (!(ExternalReader.appDirectoryExists())) { ExternalWriter.createAppDirectory(); } if (!(ExternalReader.noMediaFileExists())) { ExternalWriter.createNoMediaFile(); } if ((!(deviceUsesRuntimePermissions())) || (writePermissionWasGranted())) { goToNextActivity(); } } @Override protected int getLayoutResourceID() { return R.layout.activity_permission; } @Override protected ViewGroup getRootLayout() { return (ViewGroup) findViewById(R.id.linearScreen); } private boolean deviceUsesRuntimePermissions() { return (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1); } private boolean writePermissionWasGranted() { int permissionCheck = ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE); return (permissionCheck == PackageManager.PERMISSION_GRANTED); } public void askForWritePermission(View view) { if (preferences.getBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED, true)) { 
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_WRITE_EXTERNAL_STORAGE); } else { goToAppSettings(); } } @Override public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) { switch (requestCode) { case REQUEST_WRITE_EXTERNAL_STORAGE: if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { enableWritePermissionPrompt(); goToNextActivity(); } else if (userDisabledAskingForWritePermission()) { recordDisablingOfWritePermissionPrompt(); } } } private void enableWritePermissionPrompt() { preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED, true).apply(); } private void recordDisablingOfWritePermissionPrompt() { preferences.edit().putBoolean(AppPreferences.WRITE_PERMISSION_PROMPT_IS_ENABLED, false).apply(); } private boolean userDisabledAskingForWritePermission() { return (!(shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE))); } private void goToAppSettings() { Intent intent = new Intent(); intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS); Uri uri = Uri.fromParts("package", this.getPackageName(), null); intent.setData(uri); startActivity(intent); } private void goToNextActivity() { String lastOpenedWorldName = preferences.getString(AppPreferences.LAST_OPENED_WORLD, ""); if ((!(lastOpenedWorldName.isEmpty())) && (ExternalReader.worldAlreadyExists(lastOpenedWorldName))) { goToLastOpenedWorld(lastOpenedWorldName); } else { setLastOpenedWorldToNothing(); if (!(ExternalReader.appDirectoryExists())) { ExternalWriter.createAppDirectory(); } if (ExternalReader.worldListIsEmpty()) { goToWorldCreation(); } else { goToCreateOrLoadWorld(); } } } private void goToLastOpenedWorld(String lastOpenedWorldName) { ActivityUtilities.goToWorld(this, lastOpenedWorldName); finish(); } private void setLastOpenedWorldToNothing() { preferences.edit().putString(AppPreferences.LAST_OPENED_WORLD, 
"").apply(); } private void goToWorldCreation() { Intent goToWorldCreationIntent = new Intent(this, CreateWorldActivity.class); startActivity(goToWorldCreationIntent); finish(); } private void goToCreateOrLoadWorld() { Intent goToCreateOrLoadWorldIntent = new Intent(this, CreateOrLoadWorldActivity.class); startActivity(goToCreateOrLoadWorldIntent); finish(); } }
Modify files only after user grants permission The calls to generate the app directory and .nomedia file in onCreate() wouldn't work when the app is first launched, as the user hasn't granted file permissions yet.
app/src/main/java/com/averi/worldscribe/activities/PermissionActivity.java
Modify files only after user grants permission
Java
mit
4fd3a7e9c79ee6aa64684fb543e57a9b03392730
0
Naftoreiclag/Easy-Perms-Maker
package naftoreiclag.easypermsmaker; import javax.imageio.ImageIO; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.border.EmptyBorder; import javax.swing.plaf.FontUIResource; import javax.swing.ImageIcon; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JTabbedPane; import javax.swing.UIManager; import javax.swing.UIManager.LookAndFeelInfo; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.EventQueue; import java.awt.Font; import java.awt.Image; import javax.swing.SpringLayout; import naftoreiclag.easypermsmaker.customswings.JPanelTextured; import naftoreiclag.easypermsmaker.utilities.SpringUtilities; import java.awt.Color; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.Enumeration; //I named this class "Main" just so java newbies can find the //main method faster! Aren't I so nice? :) @SuppressWarnings("serial") public class Main extends JFrame { public static boolean isNimbus; public static final String dir_images = "resources/images/"; public final JPanel mainPanel; public final JTabbedPane tabHolder; public final JPanel tab_controls; public final JPanel tab_classes; public final JPanel tab_permissions; public final JPanel tab_users; public final JComboBox<String> combo_exportSelection; public static ImageIcon icon_controls; public static ImageIcon icon_classes; public static ImageIcon icon_permissions; public static ImageIcon icon_users; public static Image img_wallpaper; public Main() { // SET UP THE WINDOW // ================= // Set the display name super("Naftoreiclag's Easy Perms Maker"); // Close when we click the X setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Default size setBounds(100, 100, 450, 300); // Make a new panel mainPanel = new JPanelTextured(img_wallpaper); //mainPanel.setBackground(new Color(0x6D89BC)); // Make stuff in the panel fit to max size mainPanel.setBorder(new EmptyBorder(0, 0, 0, 0)); mainPanel.setLayout(new 
BorderLayout(0, 0)); // This panel is now the main content provider setContentPane(mainPanel); // SET UP TABS // =========== // Set up the tabs tabHolder = new JTabbedPane(JTabbedPane.TOP); // We always want the tabs to be horizontal //tabHolder.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT); // Add these tabs to the main panel mainPanel.add(tabHolder); // CONTROLS PANEL TAB // ================== // Make a new panel and add it to our tabs tab_controls = new JPanel(); tabHolder.addTab(null, icon_controls, tab_controls, "Controls"); // Set up layout tab_controls.setLayout(new SpringLayout()); // Permissions selection interface tab_controls.add(new JLabel("Permissions Plugin:")); // Combo selection combo_exportSelection = new JComboBox<String>(ExportCodeDatabase.getComboBoxSelectionStuff()); combo_exportSelection.setSelectedIndex(0); combo_exportSelection.setPreferredSize(new Dimension(200, combo_exportSelection.getPreferredSize().height)); combo_exportSelection.setMaximumSize(combo_exportSelection.getPreferredSize()); tab_controls.add(combo_exportSelection); // //tab_controls.add(new JLabel("Permissions Plugin:")); // SpringUtilities.makeCompactGrid(tab_controls, 1, 2, 15, 5, 5, 5); tab_classes = new JPanel(); tabHolder.addTab(null, icon_classes, tab_classes, "Classes"); tab_classes.setLayout(new SpringLayout()); tab_permissions = new JPanel(); tabHolder.addTab(null, icon_permissions, tab_permissions, "Class Permissions"); tab_permissions.setLayout(new BorderLayout(0, 0)); tab_users = new JPanel(); tabHolder.addTab(null, icon_users, tab_users, "Users"); tab_users.setLayout(new BorderLayout(0, 0)); } // Load our pretty images private static void loadImagesAndIcons() { icon_controls = new ImageIcon("resources/images/preferences-desktop.png", null); icon_classes = new ImageIcon("resources/images/preferences-system-windows.png", null); icon_permissions = new ImageIcon("resources/images/accessories-text-editor.png", null); icon_users = new 
ImageIcon("resources/images/system-users.png", null); img_wallpaper = loadImageWithComplaints("wallpaper.png"); } // Auxilary method for loading images with "handled" errors private static Image loadImageWithComplaints(String filename) { Image returnVal = null; try { returnVal = ImageIO.read(new File(dir_images + filename)); } catch (IOException e) { System.err.println("Could not load image: " + filename); } return returnVal; } // Set the LAF to something that looks cool private static void setupLookAndFeel() { // Try set it to nimbus try { for(LookAndFeelInfo laf : UIManager.getInstalledLookAndFeels()) { if(laf.getName().equalsIgnoreCase("Nimbus")) { UIManager.setLookAndFeel(laf.getClassName()); isNimbus = true; return; } } } catch(Exception e) { System.err.println("Could not set LAF to Nimbus!"); }; } // This is where the magic begins public static void main(String args[]) { // Load export types ExportCodeDatabase.registerTypes(); // Make it look cool loadImagesAndIcons(); setupLookAndFeel(); UIManager.getLookAndFeelDefaults().put("defaultFont", new Font("Arial", Font.PLAIN, 20)); // Do this stuff later (which in java-ese it means that we do it almost now) EventQueue.invokeLater(new Runnable() { // Do this public void run() { // Try try { // Make a new me Main main = new Main(); // And then show it main.setVisible(true); } // If you fail catch (Exception e) { // Bleh e.printStackTrace(); } } }); } }
java/naftoreiclag/easypermsmaker/Main.java
package naftoreiclag.easypermsmaker; import javax.imageio.ImageIO; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.border.EmptyBorder; import javax.swing.ImageIcon; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JTabbedPane; import javax.swing.UIManager; import javax.swing.UIManager.LookAndFeelInfo; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.EventQueue; import java.awt.Image; import javax.swing.SpringLayout; import naftoreiclag.easypermsmaker.customswings.JPanelTextured; import naftoreiclag.easypermsmaker.utilities.SpringUtilities; import java.awt.Color; import java.io.File; import java.io.IOException; import java.net.URL; //I named this class "Main" just so java newbies can find the //main method faster! Aren't I so nice? :) @SuppressWarnings("serial") public class Main extends JFrame { public static boolean isNimbus; public static final String dir_images = "resources/images/"; public final JPanel mainPanel; public final JTabbedPane tabHolder; public final JPanel tab_controls; public final JPanel tab_classes; public final JPanel tab_permissions; public final JPanel tab_users; public final JComboBox<String> combo_exportSelection; public static ImageIcon icon_controls; public static ImageIcon icon_classes; public static ImageIcon icon_permissions; public static ImageIcon icon_users; public static Image img_wallpaper; public Main() { // SET UP THE WINDOW // ================= // Set the display name super("Naftoreiclag's Easy Perms Maker"); // Close when we click the X setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Default size setBounds(100, 100, 450, 300); // Make a new panel mainPanel = new JPanelTextured(img_wallpaper); //mainPanel.setBackground(new Color(0x6D89BC)); // Make stuff in the panel fit to max size mainPanel.setBorder(new EmptyBorder(0, 0, 0, 0)); mainPanel.setLayout(new BorderLayout(0, 0)); // This panel is now the main content provider setContentPane(mainPanel); // 
SET UP TABS // =========== // Set up the tabs tabHolder = new JTabbedPane(JTabbedPane.TOP); // We always want the tabs to be horizontal //tabHolder.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT); // Add these tabs to the main panel mainPanel.add(tabHolder); // CONTROLS PANEL TAB // ================== // Make a new panel and add it to our tabs tab_controls = new JPanel(); tabHolder.addTab(null, icon_controls, tab_controls, "Controls"); // Set up layout tab_controls.setLayout(new SpringLayout()); // Permissions selection interface tab_controls.add(new JLabel("Permissions Plugin:")); // Combo selection combo_exportSelection = new JComboBox<String>(ExportCodeDatabase.getComboBoxSelectionStuff()); combo_exportSelection.setSelectedIndex(0); combo_exportSelection.setPreferredSize(new Dimension(200, combo_exportSelection.getPreferredSize().height)); combo_exportSelection.setMaximumSize(combo_exportSelection.getPreferredSize()); tab_controls.add(combo_exportSelection); // tab_controls.add(new JLabel("Permissions Plugin:")); // SpringUtilities.makeCompactGrid(tab_controls, 1, 2, 15, 5, 5, 5); tab_classes = new JPanel(); tabHolder.addTab(null, icon_classes, tab_classes, "Classes"); tab_classes.setLayout(new SpringLayout()); tab_permissions = new JPanel(); tabHolder.addTab(null, icon_permissions, tab_permissions, "Class Permissions"); tab_permissions.setLayout(new BorderLayout(0, 0)); tab_users = new JPanel(); tabHolder.addTab(null, icon_users, tab_users, "Users"); tab_users.setLayout(new BorderLayout(0, 0)); } // // Load our pretty images private static void loadImagesAndIcons() { icon_controls = new ImageIcon("resources/images/preferences-desktop.png", null); icon_classes = new ImageIcon("resources/images/preferences-system-windows.png", null); icon_permissions = new ImageIcon("resources/images/accessories-text-editor.png", null); icon_users = new ImageIcon("resources/images/system-users.png", null); img_wallpaper = loadImageWithComplaints("wallpaper.png"); } // Auxilary 
method for loading images with "handled" errors private static Image loadImageWithComplaints(String filename) { Image returnVal = null; try { returnVal = ImageIO.read(new File(dir_images + filename)); } catch (IOException e) { System.err.println("Could not load image: " + filename); } return returnVal; } // Set the LAF to something that looks cool private static void setupLookAndFeel() { // Try set it to nimbus try { for(LookAndFeelInfo laf : UIManager.getInstalledLookAndFeels()) { if(laf.getName().equalsIgnoreCase("Nimbus")) { UIManager.setLookAndFeel(laf.getClassName()); isNimbus = true; return; } } } catch(Exception e) { System.err.println("Could not set LAF to Nimbus!"); }; } // This is where the magic begins public static void main(String args[]) { // Load export types ExportCodeDatabase.registerTypes(); // Make it look cool loadImagesAndIcons(); setupLookAndFeel(); // Do this stuff later (which in java-ese it means that we do it almost now) EventQueue.invokeLater(new Runnable() { // Do this public void run() { // Try try { // Make a new me Main main = new Main(); // And then show it main.setVisible(true); } // If you fail catch (Exception e) { // Bleh e.printStackTrace(); } } }); } }
arial font
java/naftoreiclag/easypermsmaker/Main.java
arial font
Java
epl-1.0
48fff50770a12bd46418243c1ed721a9d6754674
0
asupdev/asup,asupdev/asup,asupdev/asup
/** * Copyright (c) 2012, 2014 Sme.UP and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * * Contributors: * Mattia Rocchi - Initial API and implementation * Giuliano Giancristofaro - Implementation */ package org.asup.dk.compiler.rpj.writer; import java.io.IOException; import java.util.Collection; import java.util.List; import javax.annotation.PostConstruct; import javax.inject.Inject; import org.asup.dk.compiler.QCompilationSetup; import org.asup.dk.compiler.QCompilationUnit; import org.asup.dk.compiler.QCompilerLinker; import org.asup.fw.core.annotation.Supported; import org.asup.fw.core.annotation.ToDo; import org.asup.fw.core.annotation.Unsupported; import org.asup.il.core.QConversion; import org.asup.il.core.QNamedNode; import org.asup.il.core.QTerm; import org.asup.il.data.QBufferedData; import org.asup.il.data.QDataTerm; import org.asup.il.data.annotation.Entry; import org.asup.il.esql.CursorType; import org.asup.il.esql.QCursor; import org.asup.il.esql.QCursorTerm; import org.asup.il.esql.QStatement; import org.asup.il.esql.QStatementTerm; import org.asup.il.esql.annotation.CursorDef; import org.asup.il.expr.IntegratedLanguageExpressionRuntimeException; import org.asup.il.flow.QBlock; import org.asup.il.flow.QCallableUnit; import org.asup.il.flow.QDataSection; import org.asup.il.flow.QEntryParameter; import org.asup.il.flow.QParameterList; import org.asup.il.flow.QPrototype; import org.asup.il.flow.QRoutine; import org.asup.il.flow.QUnit; import org.asup.il.isam.QDataSet; import org.asup.il.isam.QDataSetTerm; import org.asup.il.isam.QKSDataSet; import org.asup.il.isam.QKeyListTerm; import org.asup.il.isam.QRRDataSet; import org.asup.il.isam.annotation.DataSetDef; import org.asup.os.type.pgm.rpj.RPJServiceSupport; import org.eclipse.jdt.core.dom.AST; 
import org.eclipse.jdt.core.dom.ASTNode; import org.eclipse.jdt.core.dom.ASTParser; import org.eclipse.jdt.core.dom.ArrayCreation; import org.eclipse.jdt.core.dom.ArrayInitializer; import org.eclipse.jdt.core.dom.Block; import org.eclipse.jdt.core.dom.EnumConstantDeclaration; import org.eclipse.jdt.core.dom.EnumDeclaration; import org.eclipse.jdt.core.dom.Expression; import org.eclipse.jdt.core.dom.ExpressionStatement; import org.eclipse.jdt.core.dom.FieldAccess; import org.eclipse.jdt.core.dom.FieldDeclaration; import org.eclipse.jdt.core.dom.MarkerAnnotation; import org.eclipse.jdt.core.dom.MethodDeclaration; import org.eclipse.jdt.core.dom.MethodInvocation; import org.eclipse.jdt.core.dom.Modifier; import org.eclipse.jdt.core.dom.Modifier.ModifierKeyword; import org.eclipse.jdt.core.dom.ParameterizedType; import org.eclipse.jdt.core.dom.ReturnStatement; import org.eclipse.jdt.core.dom.SingleVariableDeclaration; import org.eclipse.jdt.core.dom.Type; import org.eclipse.jdt.core.dom.VariableDeclarationFragment; public abstract class JDTCallableUnitWriter extends JDTUnitWriter { public JDTCallableUnitWriter(JDTNamedNodeWriter root, QCompilationUnit compilationUnit, QCompilationSetup compilationSetup, String name) { super(root, compilationUnit, compilationSetup, name); } @SuppressWarnings("unchecked") public void writeLabels(Collection<String> labels) { if (labels.isEmpty()) return; EnumDeclaration enumType = getAST().newEnumDeclaration(); enumType.setName(getAST().newSimpleName("TAG")); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.PUBLIC_KEYWORD)); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.STATIC_KEYWORD)); // elements int num = 0; for (String label : labels) { EnumConstantDeclaration constantDeclaration = getAST().newEnumConstantDeclaration(); constantDeclaration.setName(getAST().newSimpleName(normalizeEnumName(label))); enumType.enumConstants().add(num, constantDeclaration); num++; } 
getTarget().bodyDeclarations().add(enumType); } @SuppressWarnings("unchecked") public void writeMessages(Collection<String> messages) { EnumDeclaration enumType = getAST().newEnumDeclaration(); enumType.setName(getAST().newSimpleName("QCPFMSG")); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.PUBLIC_KEYWORD)); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.STATIC_KEYWORD)); // elements int num = 0; for (String message : messages) { if (message.equalsIgnoreCase("CPF0000")) continue; EnumConstantDeclaration constantDeclaration = getAST().newEnumConstantDeclaration(); constantDeclaration.setName(getAST().newSimpleName(normalizeEnumName(message))); enumType.enumConstants().add(num, constantDeclaration); num++; } getTarget().bodyDeclarations().add(enumType); } @SuppressWarnings("unchecked") public void writeModuleFields(List<String> modules, boolean public_) { for (String module : modules) { String moduleName = getCompilationUnit().normalizeTermName(module).toUpperCase(); VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); // writeAnnotation(field, ModuleDef.class, "name", moduleName); writeAnnotation(field, Inject.class); // writeAnnotation(field, Named.class, "value", moduleName); if (public_) field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); else field.modifiers().add(getAST().newModifier(ModifierKeyword.PRIVATE_KEYWORD)); field.setType(getAST().newSimpleType(getAST().newName(moduleName))); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(module))); getTarget().bodyDeclarations().add(field); } } public void writeDataFields(QDataSection dataSection) { // fields for (QDataTerm<?> dataTerm : dataSection.getDatas()) { if (dataTerm.getDefinition() == null) continue; dataTerm = getCompilationUnit().getDataTerm(dataTerm.getName(), true); writePublicField(dataTerm, false); } } 
@SuppressWarnings("unchecked") public void writeDataSets(List<QDataSetTerm> dataSets) throws IOException { writeImport(QDataSet.class); for (QDataSetTerm dataSet : dataSets) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); writeAnnotation(field, DataSetDef.class, "name", dataSet.getFileName()); writeAnnotation(field, DataSetDef.class, "userOpen", dataSet.isUserOpen()); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); String className = null; if (dataSet.isKeyedAccess()) { writeImport(QKSDataSet.class); className = QKSDataSet.class.getSimpleName(); } else { writeImport(QRRDataSet.class); className = QRRDataSet.class.getSimpleName(); } Type dataSetType = getAST().newSimpleType(getAST().newSimpleName(className)); ParameterizedType parType = getAST().newParameterizedType(dataSetType); if (dataSet.getFileName().equals("PRT198")) parType.typeArguments().add(getAST().newWildcardType()); else { QCompilerLinker compilerLinker = dataSet.getFacet(QCompilerLinker.class); if (compilerLinker != null) { parType.typeArguments().add(getAST().newSimpleType(getAST().newName(compilerLinker.getLinkedClass().getName().split("\\.")))); } else { if(dataSet.getRecord() != null) compilerLinker = dataSet.getRecord().getFacet(QCompilerLinker.class); if (compilerLinker != null) { String argument = dataSet.getRecord().getName(); parType.typeArguments().add(getAST().newSimpleType(getAST().newSimpleName(argument))); } else { String argument = dataSet.getFileName(); parType.typeArguments().add(getAST().newSimpleType(getAST().newSimpleName(argument))); } } } field.setType(parType); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(dataSet.getName()))); getTarget().bodyDeclarations().add(field); if(dataSet.getRecord() != null && dataSet.getRecord().getFacet(QCompilerLinker.class) != null) writeInnerTerm(dataSet.getRecord()); } } public void 
writeKeyLists(List<QKeyListTerm> keyLists) { writeImport(QBufferedData.class); for (QKeyListTerm keyList : keyLists) { writeKeyList(keyList); } } @SuppressWarnings("unchecked") public void writeKeyList(QKeyListTerm keyList) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(keyList.getName()))); FieldDeclaration field = getAST().newFieldDeclaration(variable); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Type bufferedType = getAST().newSimpleType(getAST().newSimpleName(QBufferedData.class.getSimpleName())); field.setType(getAST().newArrayType(bufferedType)); // array of bufferedData ArrayCreation arrayCreation = getAST().newArrayCreation(); arrayCreation.setType(getAST().newArrayType(getAST().newSimpleType(getAST().newSimpleName(QBufferedData.class.getSimpleName())))); ArrayInitializer arrayInitializer = getAST().newArrayInitializer(); for (String keyField : keyList.getKeyFields()) { QNamedNode namedNode = getCompilationUnit().getNamedNode(keyField, true); String qualifiedName = getCompilationUnit().getQualifiedName(namedNode); arrayInitializer.expressions().add(buildExpression(qualifiedName)); } arrayCreation.setInitializer(arrayInitializer); variable.setInitializer(arrayCreation); getTarget().bodyDeclarations().add(field); } @SuppressWarnings("unchecked") public void writeCursors(List<QCursorTerm> cursors) { writeImport(QCursor.class); writeImport(CursorType.class); for (QCursorTerm cursorTerm : cursors) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); if (cursorTerm.isHold()) writeAnnotation(field, CursorDef.class, "hold", cursorTerm.isHold()); writeAnnotation(field, CursorDef.class, "type", cursorTerm.getCursorType()); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Type dataSetType = 
getAST().newSimpleType(getAST().newSimpleName(QCursor.class.getSimpleName())); field.setType(dataSetType); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(cursorTerm.getName()))); getTarget().bodyDeclarations().add(field); } } @SuppressWarnings("unchecked") public void writeStatements(List<QStatementTerm> statements) { writeImport(QStatement.class); for (QStatementTerm statementTerm : statements) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Type dataSetType = getAST().newSimpleType(getAST().newSimpleName(QStatement.class.getSimpleName())); field.setType(dataSetType); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(statementTerm.getName()))); getTarget().bodyDeclarations().add(field); } } @SuppressWarnings("unchecked") public void writeRoutine(QRoutine routine) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(routine.getName()))); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); // writeSuppressWarning(methodDeclaration); Block block = getAST().newBlock(); methodDeclaration.setBody(block); if (routine.getMain() == null) return; // write java AST JDTStatementWriter statementWriter = getCompilationUnit().getContext().make(JDTStatementWriter.class); statementWriter.setAST(getAST()); statementWriter.getBlocks().push(block); if (routine.getMain() instanceof QBlock) { QBlock qBlock = (QBlock) routine.getMain(); for (org.asup.il.flow.QStatement qStatement : qBlock.getStatements()) qStatement.accept(statementWriter); } else routine.getMain().accept(statementWriter); statementWriter.getBlocks().pop(); } @SuppressWarnings("unchecked") 
public void writePrototype(QPrototype<?> prototype) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(prototype.getName()))); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); // writeSuppressWarning(methodDeclaration); if (prototype.getDelegate() != null) { Type type = getJavaType(prototype.getDelegate()); methodDeclaration.setReturnType2(type); } if (prototype.getEntry() != null) { int p = 0; for (QEntryParameter<?> entryParameter : prototype.getEntry().getParameters()) { QTerm parameterDelegate = entryParameter.getDelegate(); SingleVariableDeclaration singleVar = getAST().newSingleVariableDeclaration(); String parameterName = parameterDelegate.getName(); if (parameterName == null) parameterName = "arg" + p; singleVar.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(parameterName))); if (parameterDelegate instanceof QDataTerm) { QDataTerm<?> dataTerm = (QDataTerm<?>) parameterDelegate; // primitive if (dataTerm.isConstant()) singleVar.setType(getJavaPrimitive(dataTerm)); else { Type type = getJavaType(dataTerm); singleVar.setType(type); } } else if (parameterDelegate instanceof QDataSetTerm) { Type dataSet = getAST().newSimpleType(getAST().newSimpleName(QDataSet.class.getSimpleName())); ParameterizedType parType = getAST().newParameterizedType(dataSet); parType.typeArguments().add(getAST().newWildcardType()); singleVar.setType(parType); } methodDeclaration.parameters().add(singleVar); p++; } } Block block = getAST().newBlock(); methodDeclaration.setBody(block); // write java AST JDTStatementWriter statementWriter = getCompilationUnit().getContext().make(JDTStatementWriter.class); statementWriter.setAST(getAST()); statementWriter.getBlocks().push(block); if (prototype.getDelegate() != null) { ReturnStatement returnStatement = 
getAST().newReturnStatement(); // returnStatement.setExpression(getAST().newNullLiteral()); // block.statements().add(returnStatement); block.statements().add(getReturnStatement(returnStatement, prototype, methodDeclaration)); } statementWriter.getBlocks().pop(); } @SuppressWarnings("unchecked") public void writeMain(QParameterList parameterList, String name) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(name)); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); // writeSuppressWarning(methodDeclaration); MarkerAnnotation entryAnnotation = getAST().newMarkerAnnotation(); entryAnnotation.setTypeName(getAST().newSimpleName(Entry.class.getSimpleName())); writeImport(Entry.class); methodDeclaration.modifiers().add(entryAnnotation); for (String parameterName : parameterList.getParameters()) { QDataTerm<?> dataTerm = getCompilationUnit().getDataTerm(parameterName, true); SingleVariableDeclaration parameterVariable = getAST().newSingleVariableDeclaration(); parameterVariable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(dataTerm.getName()))); Type type = getJavaType(dataTerm); parameterVariable.setType(type); QConversion conversion = dataTerm.getFacet(QConversion.class); if (conversion != null) { MarkerAnnotation conversionAnnotation = getAST().newMarkerAnnotation(); switch (conversion.getStatus()) { case POSSIBLE: break; case SUPPORTED: writeImport(Supported.class); conversionAnnotation.setTypeName(getAST().newSimpleName(Supported.class.getSimpleName())); parameterVariable.modifiers().add(conversionAnnotation); break; case TODO: writeImport(ToDo.class); conversionAnnotation.setTypeName(getAST().newSimpleName(ToDo.class.getSimpleName())); parameterVariable.modifiers().add(conversionAnnotation); break; case UNSUPPORTED: writeImport(Unsupported.class); 
conversionAnnotation.setTypeName(getAST().newSimpleName(Unsupported.class.getSimpleName())); parameterVariable.modifiers().add(conversionAnnotation); break; } } writeDataDefAnnotation(parameterVariable, dataTerm.getDefinition()); methodDeclaration.parameters().add(parameterVariable); } Block block = getAST().newBlock(); methodDeclaration.setBody(block); } @SuppressWarnings("unchecked") public void writeInit() { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName("qInit")); MarkerAnnotation entryAnnotation = getAST().newMarkerAnnotation(); entryAnnotation.setTypeName(getAST().newSimpleName(PostConstruct.class.getSimpleName())); writeImport(PostConstruct.class); methodDeclaration.modifiers().add(entryAnnotation); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Block block = getAST().newBlock(); methodDeclaration.setBody(block); QRoutine qInzsr = getCompilationUnit().getRoutine("*INZSR", true); if (qInzsr != null) { if (qInzsr.getParent() == getCompilationUnit().getRoot()) { MethodInvocation methodInvocation = getAST().newMethodInvocation(); methodInvocation.setExpression(getAST().newThisExpression()); methodInvocation.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(qInzsr.getName()))); ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } else { MethodInvocation methodInvocation = getAST().newMethodInvocation(); methodInvocation.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(qInzsr.getName()))); methodInvocation.setExpression(buildExpression(getCompilationUnit().getQualifiedName(qInzsr))); ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } } // £INIZI QRoutine £inizi = 
getCompilationUnit().getRoutine("£INIZI", false); if (£inizi != null) { MethodInvocation methodInvocation = getAST().newMethodInvocation(); methodInvocation = getAST().newMethodInvocation(); methodInvocation.setExpression(getAST().newThisExpression()); methodInvocation.setName(getAST().newSimpleName("£inizi")); ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } } @SuppressWarnings("unchecked") public void writeEntry(QParameterList parameterList, String name) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(name)); MarkerAnnotation entryAnnotation = getAST().newMarkerAnnotation(); entryAnnotation.setTypeName(getAST().newSimpleName(Entry.class.getSimpleName())); writeImport(Entry.class); methodDeclaration.modifiers().add(entryAnnotation); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); for (String parameterName : parameterList.getParameters()) { QDataTerm<?> dataTerm = getCompilationUnit().getDataTerm(parameterName, true); SingleVariableDeclaration parameterVariable = getAST().newSingleVariableDeclaration(); parameterVariable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(dataTerm.getName()))); Type type = getJavaType(dataTerm); parameterVariable.setType(type); writeDataDefAnnotation(parameterVariable, dataTerm.getDefinition()); methodDeclaration.parameters().add(parameterVariable); } Block block = getAST().newBlock(); methodDeclaration.setBody(block); for (String parameterName : parameterList.getParameters()) { MethodInvocation methodInvocation = getAST().newMethodInvocation(); methodInvocation.setName(getAST().newSimpleName("assign")); methodInvocation.setExpression(getAST().newSimpleName(getCompilationUnit().normalizeTermName(parameterName))); QDataTerm<?> dataTerm = 
getCompilationUnit().getDataTerm(parameterName, true); String qualifiedName = getCompilationUnit().getQualifiedName(dataTerm); String[] fieldNames = qualifiedName.split("\\."); if(fieldNames.length>1) { methodInvocation.arguments().add(buildExpression(qualifiedName)); } else { FieldAccess targetAccess = getAST().newFieldAccess(); targetAccess.setExpression(getAST().newThisExpression()); for(int i=0; i<fieldNames.length; i++) { targetAccess.setName(getAST().newSimpleName(fieldNames[i])); if(i<fieldNames.length-1) { FieldAccess childAccess = getAST().newFieldAccess(); childAccess.setExpression(targetAccess); targetAccess = childAccess; } } methodInvocation.arguments().add(targetAccess); } ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } // this.main MethodInvocation mainInvocation = getAST().newMethodInvocation(); mainInvocation.setExpression(getAST().newThisExpression()); mainInvocation.setName(getAST().newSimpleName("main")); ExpressionStatement mainStatement = getAST().newExpressionStatement(mainInvocation); block.statements().add(mainStatement); } public void refactCallableUnit(QCallableUnit callableUnit) { refactUnit(callableUnit); // main if (callableUnit.getFlowSection() != null) { for (QUnit unit : callableUnit.getFlowSection().getRoutines()) { refactUnit(unit); } } } @SuppressWarnings("unchecked") private ReturnStatement getReturnStatement(ReturnStatement returnStatement, QPrototype<?> prototype, MethodDeclaration methodDeclaration) { String namePrototype = getCompilationUnit().normalizeTermName(prototype.getName()); MethodInvocation methodInvocation = getAST().newMethodInvocation(); switch (namePrototype) { case "p_rxatt": writeImport(RPJServiceSupport.class); methodInvocation.setExpression(getAST().newName("qJAX")); methodInvocation.setName(getAST().newSimpleName(namePrototype)); for (Object entryParameter : methodDeclaration.parameters()) { SingleVariableDeclaration 
singleVariableDeclaration = (SingleVariableDeclaration) entryParameter; methodInvocation.arguments().add(getAST().newSimpleName(singleVariableDeclaration.getName().toString())); } returnStatement.setExpression(methodInvocation); break; case "p_rxsos": writeImport(RPJServiceSupport.class); methodInvocation.setExpression(getAST().newName("qJAX")); methodInvocation.setName(getAST().newSimpleName(namePrototype)); for (Object entryParameter : methodDeclaration.parameters()) { SingleVariableDeclaration singleVariableDeclaration = (SingleVariableDeclaration) entryParameter; methodInvocation.arguments().add(getAST().newSimpleName(singleVariableDeclaration.getName().toString())); } returnStatement.setExpression(methodInvocation); break; case "p_rxlate": writeImport(RPJServiceSupport.class); methodInvocation.setExpression(getAST().newName("qJAX")); methodInvocation.setName(getAST().newSimpleName(namePrototype)); for (Object entryParameter : methodDeclaration.parameters()) { SingleVariableDeclaration singleVariableDeclaration = (SingleVariableDeclaration) entryParameter; methodInvocation.arguments().add(getAST().newSimpleName(singleVariableDeclaration.getName().toString())); } returnStatement.setExpression(methodInvocation); break; default: returnStatement.setExpression(getAST().newNullLiteral()); } return returnStatement; } private Expression buildExpression(String expression) { ASTParser parser = ASTParser.newParser(AST.JLS8); parser.setKind(ASTParser.K_EXPRESSION); parser.setSource(expression.toCharArray()); ASTNode node = parser.createAST(null); if (node.getLength() == 0) throw new IntegratedLanguageExpressionRuntimeException("Invalid java conversion: " + expression); Expression jdtExpression = (Expression) node; return (Expression) ASTNode.copySubtree(getAST(), jdtExpression); } }
org.asup.dk.compiler.rpj/src/org/asup/dk/compiler/rpj/writer/JDTCallableUnitWriter.java
/** * Copyright (c) 2012, 2014 Sme.UP and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * * Contributors: * Mattia Rocchi - Initial API and implementation * Giuliano Giancristofaro - Implementation */ package org.asup.dk.compiler.rpj.writer; import java.io.IOException; import java.util.Collection; import java.util.List; import javax.annotation.PostConstruct; import javax.inject.Inject; import org.asup.dk.compiler.QCompilationSetup; import org.asup.dk.compiler.QCompilationUnit; import org.asup.dk.compiler.QCompilerLinker; import org.asup.fw.core.annotation.Supported; import org.asup.fw.core.annotation.ToDo; import org.asup.fw.core.annotation.Unsupported; import org.asup.il.core.QConversion; import org.asup.il.core.QNamedNode; import org.asup.il.core.QTerm; import org.asup.il.data.QBufferedData; import org.asup.il.data.QDataTerm; import org.asup.il.data.annotation.Entry; import org.asup.il.esql.CursorType; import org.asup.il.esql.QCursor; import org.asup.il.esql.QCursorTerm; import org.asup.il.esql.QStatement; import org.asup.il.esql.QStatementTerm; import org.asup.il.esql.annotation.CursorDef; import org.asup.il.expr.IntegratedLanguageExpressionRuntimeException; import org.asup.il.flow.QBlock; import org.asup.il.flow.QCallableUnit; import org.asup.il.flow.QDataSection; import org.asup.il.flow.QEntryParameter; import org.asup.il.flow.QModule; import org.asup.il.flow.QParameterList; import org.asup.il.flow.QProgram; import org.asup.il.flow.QPrototype; import org.asup.il.flow.QRoutine; import org.asup.il.flow.QUnit; import org.asup.il.isam.QDataSet; import org.asup.il.isam.QDataSetTerm; import org.asup.il.isam.QKSDataSet; import org.asup.il.isam.QKeyListTerm; import org.asup.il.isam.QRRDataSet; import org.asup.il.isam.annotation.DataSetDef; import 
org.asup.os.type.pgm.rpj.RPJServiceSupport; import org.eclipse.jdt.core.dom.AST; import org.eclipse.jdt.core.dom.ASTNode; import org.eclipse.jdt.core.dom.ASTParser; import org.eclipse.jdt.core.dom.ArrayCreation; import org.eclipse.jdt.core.dom.ArrayInitializer; import org.eclipse.jdt.core.dom.Block; import org.eclipse.jdt.core.dom.EnumConstantDeclaration; import org.eclipse.jdt.core.dom.EnumDeclaration; import org.eclipse.jdt.core.dom.Expression; import org.eclipse.jdt.core.dom.ExpressionStatement; import org.eclipse.jdt.core.dom.FieldAccess; import org.eclipse.jdt.core.dom.FieldDeclaration; import org.eclipse.jdt.core.dom.MarkerAnnotation; import org.eclipse.jdt.core.dom.MethodDeclaration; import org.eclipse.jdt.core.dom.MethodInvocation; import org.eclipse.jdt.core.dom.Modifier; import org.eclipse.jdt.core.dom.Modifier.ModifierKeyword; import org.eclipse.jdt.core.dom.ParameterizedType; import org.eclipse.jdt.core.dom.ReturnStatement; import org.eclipse.jdt.core.dom.SingleVariableDeclaration; import org.eclipse.jdt.core.dom.Type; import org.eclipse.jdt.core.dom.VariableDeclarationFragment; public abstract class JDTCallableUnitWriter extends JDTUnitWriter { public JDTCallableUnitWriter(JDTNamedNodeWriter root, QCompilationUnit compilationUnit, QCompilationSetup compilationSetup, String name) { super(root, compilationUnit, compilationSetup, name); } @SuppressWarnings("unchecked") public void writeLabels(Collection<String> labels) { if (labels.isEmpty()) return; EnumDeclaration enumType = getAST().newEnumDeclaration(); enumType.setName(getAST().newSimpleName("TAG")); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.PUBLIC_KEYWORD)); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.STATIC_KEYWORD)); // elements int num = 0; for (String label : labels) { EnumConstantDeclaration constantDeclaration = getAST().newEnumConstantDeclaration(); constantDeclaration.setName(getAST().newSimpleName(normalizeEnumName(label))); 
enumType.enumConstants().add(num, constantDeclaration); num++; } getTarget().bodyDeclarations().add(enumType); } @SuppressWarnings("unchecked") public void writeMessages(Collection<String> messages) { EnumDeclaration enumType = getAST().newEnumDeclaration(); enumType.setName(getAST().newSimpleName("QCPFMSG")); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.PUBLIC_KEYWORD)); enumType.modifiers().add(getAST().newModifier(Modifier.ModifierKeyword.STATIC_KEYWORD)); // elements int num = 0; for (String message : messages) { if (message.equalsIgnoreCase("CPF0000")) continue; EnumConstantDeclaration constantDeclaration = getAST().newEnumConstantDeclaration(); constantDeclaration.setName(getAST().newSimpleName(normalizeEnumName(message))); enumType.enumConstants().add(num, constantDeclaration); num++; } getTarget().bodyDeclarations().add(enumType); } @SuppressWarnings("unchecked") public void writeModuleFields(List<String> modules, boolean public_) { for (String module : modules) { String moduleName = getCompilationUnit().normalizeTermName(module).toUpperCase(); VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); // writeAnnotation(field, ModuleDef.class, "name", moduleName); writeAnnotation(field, Inject.class); // writeAnnotation(field, Named.class, "value", moduleName); if (public_) field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); else field.modifiers().add(getAST().newModifier(ModifierKeyword.PRIVATE_KEYWORD)); field.setType(getAST().newSimpleType(getAST().newName(moduleName))); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(module))); getTarget().bodyDeclarations().add(field); } } public void writeDataFields(QDataSection dataSection) { // fields for (QDataTerm<?> dataTerm : dataSection.getDatas()) { if (dataTerm.getDefinition() == null) continue; dataTerm = 
getCompilationUnit().getDataTerm(dataTerm.getName(), true); writePublicField(dataTerm, false); } } @SuppressWarnings("unchecked") public void writeDataSets(List<QDataSetTerm> dataSets) throws IOException { writeImport(QDataSet.class); for (QDataSetTerm dataSet : dataSets) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); writeAnnotation(field, DataSetDef.class, "name", dataSet.getFileName()); writeAnnotation(field, DataSetDef.class, "userOpen", dataSet.isUserOpen()); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); String className = null; if (dataSet.isKeyedAccess()) { writeImport(QKSDataSet.class); className = QKSDataSet.class.getSimpleName(); } else { writeImport(QRRDataSet.class); className = QRRDataSet.class.getSimpleName(); } Type dataSetType = getAST().newSimpleType(getAST().newSimpleName(className)); ParameterizedType parType = getAST().newParameterizedType(dataSetType); if (dataSet.getFileName().equals("PRT198")) parType.typeArguments().add(getAST().newWildcardType()); else { QCompilerLinker compilerLinker = dataSet.getFacet(QCompilerLinker.class); if (compilerLinker != null) { parType.typeArguments().add(getAST().newSimpleType(getAST().newName(compilerLinker.getLinkedClass().getName().split("\\.")))); } else { if(dataSet.getRecord() != null) compilerLinker = dataSet.getRecord().getFacet(QCompilerLinker.class); if (compilerLinker != null) { String argument = dataSet.getRecord().getName(); parType.typeArguments().add(getAST().newSimpleType(getAST().newSimpleName(argument))); } else { String argument = dataSet.getFileName(); parType.typeArguments().add(getAST().newSimpleType(getAST().newSimpleName(argument))); } } } field.setType(parType); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(dataSet.getName()))); getTarget().bodyDeclarations().add(field); if(dataSet.getRecord() != null && 
dataSet.getRecord().getFacet(QCompilerLinker.class) != null) writeInnerTerm(dataSet.getRecord()); } } public void writeKeyLists(List<QKeyListTerm> keyLists) { writeImport(QBufferedData.class); for (QKeyListTerm keyList : keyLists) { writeKeyList(keyList); } } @SuppressWarnings("unchecked") public void writeKeyList(QKeyListTerm keyList) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(keyList.getName()))); FieldDeclaration field = getAST().newFieldDeclaration(variable); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Type bufferedType = getAST().newSimpleType(getAST().newSimpleName(QBufferedData.class.getSimpleName())); field.setType(getAST().newArrayType(bufferedType)); // array of bufferedData ArrayCreation arrayCreation = getAST().newArrayCreation(); arrayCreation.setType(getAST().newArrayType(getAST().newSimpleType(getAST().newSimpleName(QBufferedData.class.getSimpleName())))); ArrayInitializer arrayInitializer = getAST().newArrayInitializer(); for (String keyField : keyList.getKeyFields()) { QNamedNode namedNode = getCompilationUnit().getNamedNode(keyField, true); String qualifiedName = getCompilationUnit().getQualifiedName(namedNode); arrayInitializer.expressions().add(buildExpression(qualifiedName)); } arrayCreation.setInitializer(arrayInitializer); variable.setInitializer(arrayCreation); getTarget().bodyDeclarations().add(field); } @SuppressWarnings("unchecked") public void writeCursors(List<QCursorTerm> cursors) { writeImport(QCursor.class); writeImport(CursorType.class); for (QCursorTerm cursorTerm : cursors) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); if (cursorTerm.isHold()) writeAnnotation(field, CursorDef.class, "hold", cursorTerm.isHold()); writeAnnotation(field, CursorDef.class, "type", 
cursorTerm.getCursorType()); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Type dataSetType = getAST().newSimpleType(getAST().newSimpleName(QCursor.class.getSimpleName())); field.setType(dataSetType); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(cursorTerm.getName()))); getTarget().bodyDeclarations().add(field); } } @SuppressWarnings("unchecked") public void writeStatements(List<QStatementTerm> statements) { writeImport(QStatement.class); for (QStatementTerm statementTerm : statements) { VariableDeclarationFragment variable = getAST().newVariableDeclarationFragment(); FieldDeclaration field = getAST().newFieldDeclaration(variable); field.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Type dataSetType = getAST().newSimpleType(getAST().newSimpleName(QStatement.class.getSimpleName())); field.setType(dataSetType); variable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(statementTerm.getName()))); getTarget().bodyDeclarations().add(field); } } @SuppressWarnings("unchecked") public void writeRoutine(QRoutine routine) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(routine.getName()))); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); // writeSuppressWarning(methodDeclaration); Block block = getAST().newBlock(); methodDeclaration.setBody(block); if (routine.getMain() == null) return; // write java AST JDTStatementWriter statementWriter = getCompilationUnit().getContext().make(JDTStatementWriter.class); statementWriter.setAST(getAST()); statementWriter.getBlocks().push(block); if (routine.getMain() instanceof QBlock) { QBlock qBlock = (QBlock) routine.getMain(); for (org.asup.il.flow.QStatement qStatement : qBlock.getStatements()) 
qStatement.accept(statementWriter); } else routine.getMain().accept(statementWriter); statementWriter.getBlocks().pop(); } @SuppressWarnings("unchecked") public void writePrototype(QPrototype<?> prototype) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(prototype.getName()))); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); // writeSuppressWarning(methodDeclaration); if (prototype.getDelegate() != null) { Type type = getJavaType(prototype.getDelegate()); methodDeclaration.setReturnType2(type); } if (prototype.getEntry() != null) { int p = 0; for (QEntryParameter<?> entryParameter : prototype.getEntry().getParameters()) { QTerm parameterDelegate = entryParameter.getDelegate(); SingleVariableDeclaration singleVar = getAST().newSingleVariableDeclaration(); String parameterName = parameterDelegate.getName(); if (parameterName == null) parameterName = "arg" + p; singleVar.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(parameterName))); if (parameterDelegate instanceof QDataTerm) { QDataTerm<?> dataTerm = (QDataTerm<?>) parameterDelegate; // primitive if (dataTerm.isConstant()) singleVar.setType(getJavaPrimitive(dataTerm)); else { Type type = getJavaType(dataTerm); singleVar.setType(type); } } else if (parameterDelegate instanceof QDataSetTerm) { Type dataSet = getAST().newSimpleType(getAST().newSimpleName(QDataSet.class.getSimpleName())); ParameterizedType parType = getAST().newParameterizedType(dataSet); parType.typeArguments().add(getAST().newWildcardType()); singleVar.setType(parType); } methodDeclaration.parameters().add(singleVar); p++; } } Block block = getAST().newBlock(); methodDeclaration.setBody(block); // write java AST JDTStatementWriter statementWriter = getCompilationUnit().getContext().make(JDTStatementWriter.class); 
statementWriter.setAST(getAST()); statementWriter.getBlocks().push(block); if (prototype.getDelegate() != null) { ReturnStatement returnStatement = getAST().newReturnStatement(); // returnStatement.setExpression(getAST().newNullLiteral()); // block.statements().add(returnStatement); block.statements().add(getReturnStatement(returnStatement, prototype, methodDeclaration)); } statementWriter.getBlocks().pop(); } @SuppressWarnings("unchecked") public void writeMain(QParameterList parameterList, String name) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(name)); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); // writeSuppressWarning(methodDeclaration); MarkerAnnotation entryAnnotation = getAST().newMarkerAnnotation(); entryAnnotation.setTypeName(getAST().newSimpleName(Entry.class.getSimpleName())); writeImport(Entry.class); methodDeclaration.modifiers().add(entryAnnotation); for (String parameterName : parameterList.getParameters()) { QDataTerm<?> dataTerm = getCompilationUnit().getDataTerm(parameterName, true); SingleVariableDeclaration parameterVariable = getAST().newSingleVariableDeclaration(); parameterVariable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(dataTerm.getName()))); Type type = getJavaType(dataTerm); parameterVariable.setType(type); QConversion conversion = dataTerm.getFacet(QConversion.class); if (conversion != null) { MarkerAnnotation conversionAnnotation = getAST().newMarkerAnnotation(); switch (conversion.getStatus()) { case POSSIBLE: break; case SUPPORTED: writeImport(Supported.class); conversionAnnotation.setTypeName(getAST().newSimpleName(Supported.class.getSimpleName())); parameterVariable.modifiers().add(conversionAnnotation); break; case TODO: writeImport(ToDo.class); 
conversionAnnotation.setTypeName(getAST().newSimpleName(ToDo.class.getSimpleName())); parameterVariable.modifiers().add(conversionAnnotation); break; case UNSUPPORTED: writeImport(Unsupported.class); conversionAnnotation.setTypeName(getAST().newSimpleName(Unsupported.class.getSimpleName())); parameterVariable.modifiers().add(conversionAnnotation); break; } } writeDataDefAnnotation(parameterVariable, dataTerm.getDefinition()); methodDeclaration.parameters().add(parameterVariable); } Block block = getAST().newBlock(); methodDeclaration.setBody(block); } @SuppressWarnings("unchecked") public void writeInit() { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName("qInit")); MarkerAnnotation entryAnnotation = getAST().newMarkerAnnotation(); entryAnnotation.setTypeName(getAST().newSimpleName(PostConstruct.class.getSimpleName())); writeImport(PostConstruct.class); methodDeclaration.modifiers().add(entryAnnotation); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); Block block = getAST().newBlock(); methodDeclaration.setBody(block); QRoutine qInzsr = getCompilationUnit().getRoutine("*INZSR", true); if (qInzsr != null) { if (qInzsr.getParent() instanceof QModule) { MethodInvocation methodInvocation = getAST().newMethodInvocation(); methodInvocation.setName(getAST().newSimpleName(getCompilationUnit().getQualifiedName(qInzsr))); methodInvocation.setExpression(getAST().newSimpleName("£mub")); ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } else if (qInzsr.getParent() instanceof QProgram) { MethodInvocation methodInvocation = getAST().newMethodInvocation(); methodInvocation.setExpression(getAST().newThisExpression()); methodInvocation.setName(getAST().newSimpleName(getCompilationUnit().getQualifiedName(qInzsr))); 
ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } else System.err.println("Unexpected condition: sdifb02xb67er23c23"); } // £INIZI QRoutine £inizi = getCompilationUnit().getRoutine("£INIZI", false); if (£inizi != null) { MethodInvocation methodInvocation = getAST().newMethodInvocation(); methodInvocation = getAST().newMethodInvocation(); methodInvocation.setExpression(getAST().newThisExpression()); methodInvocation.setName(getAST().newSimpleName("£inizi")); ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } } @SuppressWarnings("unchecked") public void writeEntry(QParameterList parameterList, String name) { MethodDeclaration methodDeclaration = getAST().newMethodDeclaration(); getTarget().bodyDeclarations().add(methodDeclaration); methodDeclaration.setName(getAST().newSimpleName(name)); MarkerAnnotation entryAnnotation = getAST().newMarkerAnnotation(); entryAnnotation.setTypeName(getAST().newSimpleName(Entry.class.getSimpleName())); writeImport(Entry.class); methodDeclaration.modifiers().add(entryAnnotation); methodDeclaration.modifiers().add(getAST().newModifier(ModifierKeyword.PUBLIC_KEYWORD)); for (String parameterName : parameterList.getParameters()) { QDataTerm<?> dataTerm = getCompilationUnit().getDataTerm(parameterName, true); SingleVariableDeclaration parameterVariable = getAST().newSingleVariableDeclaration(); parameterVariable.setName(getAST().newSimpleName(getCompilationUnit().normalizeTermName(dataTerm.getName()))); Type type = getJavaType(dataTerm); parameterVariable.setType(type); writeDataDefAnnotation(parameterVariable, dataTerm.getDefinition()); methodDeclaration.parameters().add(parameterVariable); } Block block = getAST().newBlock(); methodDeclaration.setBody(block); for (String parameterName : parameterList.getParameters()) { MethodInvocation methodInvocation = 
getAST().newMethodInvocation(); methodInvocation.setName(getAST().newSimpleName("assign")); methodInvocation.setExpression(getAST().newSimpleName(getCompilationUnit().normalizeTermName(parameterName))); QDataTerm<?> dataTerm = getCompilationUnit().getDataTerm(parameterName, true); String qualifiedName = getCompilationUnit().getQualifiedName(dataTerm); String[] fieldNames = qualifiedName.split("\\."); if(fieldNames.length>1) { methodInvocation.arguments().add(buildExpression(qualifiedName)); } else { FieldAccess targetAccess = getAST().newFieldAccess(); targetAccess.setExpression(getAST().newThisExpression()); for(int i=0; i<fieldNames.length; i++) { targetAccess.setName(getAST().newSimpleName(fieldNames[i])); if(i<fieldNames.length-1) { FieldAccess childAccess = getAST().newFieldAccess(); childAccess.setExpression(targetAccess); targetAccess = childAccess; } } methodInvocation.arguments().add(targetAccess); } ExpressionStatement expressionStatement = getAST().newExpressionStatement(methodInvocation); block.statements().add(expressionStatement); } // this.main MethodInvocation mainInvocation = getAST().newMethodInvocation(); mainInvocation.setExpression(getAST().newThisExpression()); mainInvocation.setName(getAST().newSimpleName("main")); ExpressionStatement mainStatement = getAST().newExpressionStatement(mainInvocation); block.statements().add(mainStatement); } public void refactCallableUnit(QCallableUnit callableUnit) { refactUnit(callableUnit); // main if (callableUnit.getFlowSection() != null) { for (QUnit unit : callableUnit.getFlowSection().getRoutines()) { refactUnit(unit); } } } @SuppressWarnings("unchecked") private ReturnStatement getReturnStatement(ReturnStatement returnStatement, QPrototype<?> prototype, MethodDeclaration methodDeclaration) { String namePrototype = getCompilationUnit().normalizeTermName(prototype.getName()); MethodInvocation methodInvocation = getAST().newMethodInvocation(); switch (namePrototype) { case "p_rxatt": 
writeImport(RPJServiceSupport.class); methodInvocation.setExpression(getAST().newName("qJAX")); methodInvocation.setName(getAST().newSimpleName(namePrototype)); for (Object entryParameter : methodDeclaration.parameters()) { SingleVariableDeclaration singleVariableDeclaration = (SingleVariableDeclaration) entryParameter; methodInvocation.arguments().add(getAST().newSimpleName(singleVariableDeclaration.getName().toString())); } returnStatement.setExpression(methodInvocation); break; case "p_rxsos": writeImport(RPJServiceSupport.class); methodInvocation.setExpression(getAST().newName("qJAX")); methodInvocation.setName(getAST().newSimpleName(namePrototype)); for (Object entryParameter : methodDeclaration.parameters()) { SingleVariableDeclaration singleVariableDeclaration = (SingleVariableDeclaration) entryParameter; methodInvocation.arguments().add(getAST().newSimpleName(singleVariableDeclaration.getName().toString())); } returnStatement.setExpression(methodInvocation); break; case "p_rxlate": writeImport(RPJServiceSupport.class); methodInvocation.setExpression(getAST().newName("qJAX")); methodInvocation.setName(getAST().newSimpleName(namePrototype)); for (Object entryParameter : methodDeclaration.parameters()) { SingleVariableDeclaration singleVariableDeclaration = (SingleVariableDeclaration) entryParameter; methodInvocation.arguments().add(getAST().newSimpleName(singleVariableDeclaration.getName().toString())); } returnStatement.setExpression(methodInvocation); break; default: returnStatement.setExpression(getAST().newNullLiteral()); } return returnStatement; } private Expression buildExpression(String expression) { ASTParser parser = ASTParser.newParser(AST.JLS8); parser.setKind(ASTParser.K_EXPRESSION); parser.setSource(expression.toCharArray()); ASTNode node = parser.createAST(null); if (node.getLength() == 0) throw new IntegratedLanguageExpressionRuntimeException("Invalid java conversion: " + expression); Expression jdtExpression = (Expression) node; return 
(Expression) ASTNode.copySubtree(getAST(), jdtExpression); } }
Group PTF #75
org.asup.dk.compiler.rpj/src/org/asup/dk/compiler/rpj/writer/JDTCallableUnitWriter.java
Group PTF #75
Java
agpl-3.0
5f623908a45d4319cc7c24d1bb63de80adab32e0
0
mbrossard/cryptonit-cloud,mbrossard/cryptonit-cloud,mbrossard/cryptonit-cloud
package org.cryptonit.cloud.keystore; import java.security.MessageDigest; import java.security.PrivateKey; import java.security.PublicKey; import java.sql.CallableStatement; import java.sql.Connection; import java.text.SimpleDateFormat; import java.util.Base64; import java.util.Date; import org.bouncycastle.asn1.DERPrintableString; import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; import org.bouncycastle.asn1.x500.X500Name; import org.bouncycastle.operator.ContentSigner; import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; import org.bouncycastle.pkcs.PKCS10CertificationRequest; import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder; import org.cryptonit.cloud.Database; import org.cryptonit.cloud.interfaces.IdentityStore; import org.cryptonit.cloud.interfaces.KeyStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SqlIdentityStore implements IdentityStore { private static final Logger LOGGER = LoggerFactory.getLogger(SqlIdentityStore.class); Database database; KeyStore keyStore; public SqlIdentityStore(Database database, KeyStore keyStore) { this.database = database; this.keyStore = keyStore; } @Override public String newIdentity(String domain, String keyId, X500Name subject) throws Exception { PrivateKey key = keyStore.getPrivateKey(domain, keyId); PublicKey pub = keyStore.getPublicKey(domain, keyId); ContentSigner signer = new JcaContentSignerBuilder("SHA256withRSA").build(key); JcaPKCS10CertificationRequestBuilder csrBuilder = new JcaPKCS10CertificationRequestBuilder(subject, pub); String date = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss").format(new Date(System.currentTimeMillis())); csrBuilder.addAttribute(PKCSObjectIdentifiers.pkcs_9_at_challengePassword, new DERPrintableString(date)); PKCS10CertificationRequest csr = csrBuilder.build(signer); MessageDigest md = MessageDigest.getInstance("SHA-256"); md.update(csr.getEncoded()); byte[] digest = md.digest(); String id = String.format("%064x", new 
java.math.BigInteger(1, digest)); Connection c = database.getConnection(); CallableStatement cs = c.prepareCall("INSERT INTO identity(domain, identityId, subject, created, request) " + "VALUES (?, ?, ?, NOW(), ?)"); cs.setString(1, domain); cs.setString(2, id); cs.setString(3, subject.toString()); cs.setString(4, Base64.getEncoder().encodeToString(csr.getEncoded())); cs.execute(); return id; } }
src/main/java/org/cryptonit/cloud/keystore/SqlIdentityStore.java
package org.cryptonit.cloud.keystore; import java.security.MessageDigest; import java.security.PrivateKey; import java.security.PublicKey; import java.text.SimpleDateFormat; import java.util.Date; import org.bouncycastle.asn1.DERPrintableString; import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; import org.bouncycastle.asn1.x500.X500Name; import org.bouncycastle.operator.ContentSigner; import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; import org.bouncycastle.pkcs.PKCS10CertificationRequest; import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder; import org.cryptonit.cloud.Database; import org.cryptonit.cloud.interfaces.IdentityStore; import org.cryptonit.cloud.interfaces.KeyStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SqlIdentityStore implements IdentityStore { private static final Logger LOGGER = LoggerFactory.getLogger(SqlIdentityStore.class); Database database; KeyStore keyStore; public SqlIdentityStore(Database database, KeyStore keyStore) { this.database = database; this.keyStore = keyStore; } @Override public String newIdentity(String domain, String keyId, X500Name subject) throws Exception { PrivateKey key = keyStore.getPrivateKey(domain, keyId); PublicKey pub = keyStore.getPublicKey(domain, keyId); ContentSigner signer = new JcaContentSignerBuilder("SHA256withRSA").build(key); JcaPKCS10CertificationRequestBuilder csrBuilder = new JcaPKCS10CertificationRequestBuilder(subject, pub); String date = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss").format(new Date(System.currentTimeMillis())); csrBuilder.addAttribute(PKCSObjectIdentifiers.pkcs_9_at_challengePassword, new DERPrintableString(date)); PKCS10CertificationRequest csr = csrBuilder.build(signer); MessageDigest md = MessageDigest.getInstance("SHA-256"); md.update(csr.getEncoded()); byte[] digest = md.digest(); String id = String.format("%064x", new java.math.BigInteger(1, digest)); return id; } }
Adding persistence in newIdentity
src/main/java/org/cryptonit/cloud/keystore/SqlIdentityStore.java
Adding persistence in newIdentity
Java
agpl-3.0
ca4e0312f0d7466b85793ba4b8c5800557a81f8c
0
jwillia/kc-old1,kuali/kc,iu-uits-es/kc,geothomasp/kcmit,geothomasp/kcmit,iu-uits-es/kc,ColostateResearchServices/kc,geothomasp/kcmit,kuali/kc,mukadder/kc,jwillia/kc-old1,iu-uits-es/kc,geothomasp/kcmit,UniversityOfHawaiiORS/kc,geothomasp/kcmit,ColostateResearchServices/kc,jwillia/kc-old1,kuali/kc,jwillia/kc-old1,ColostateResearchServices/kc,mukadder/kc,mukadder/kc,UniversityOfHawaiiORS/kc,UniversityOfHawaiiORS/kc
/* * Copyright 2005-2010 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.proposaldevelopment.service.impl; import static org.kuali.kra.infrastructure.Constants.CO_INVESTIGATOR_ROLE; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.BooleanUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.kuali.kra.award.awardhierarchy.sync.service.AwardSyncServiceImpl; import org.kuali.kra.award.home.Award; import org.kuali.kra.bo.Sponsor; import org.kuali.kra.bo.Unit; import org.kuali.kra.bo.versioning.VersionHistory; import org.kuali.kra.budget.core.Budget; import org.kuali.kra.budget.core.BudgetService; import org.kuali.kra.budget.distributionincome.BudgetCostShare; import org.kuali.kra.budget.document.BudgetDocument; import org.kuali.kra.budget.versions.BudgetDocumentVersion; import org.kuali.kra.budget.versions.BudgetVersionOverview; import org.kuali.kra.infrastructure.Constants; import org.kuali.kra.infrastructure.PermissionConstants; import org.kuali.kra.institutionalproposal.home.InstitutionalProposal; import org.kuali.kra.institutionalproposal.proposaladmindetails.ProposalAdminDetails; import 
org.kuali.kra.proposaldevelopment.bo.CoPiInfoDO; import org.kuali.kra.proposaldevelopment.bo.CostShareInfoDO; import org.kuali.kra.proposaldevelopment.bo.DevelopmentProposal; import org.kuali.kra.proposaldevelopment.bo.ProposalBudgetStatus; import org.kuali.kra.proposaldevelopment.bo.ProposalColumnsToAlter; import org.kuali.kra.proposaldevelopment.bo.ProposalOverview; import org.kuali.kra.proposaldevelopment.bo.ProposalPerson; import org.kuali.kra.proposaldevelopment.bo.ProposalSite; import org.kuali.kra.proposaldevelopment.budget.bo.BudgetColumnsToAlter; import org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument; import org.kuali.kra.proposaldevelopment.service.ProposalDevelopmentService; import org.kuali.kra.proposaldevelopment.web.struts.form.ProposalDevelopmentForm; import org.kuali.kra.service.KraPersistenceStructureService; import org.kuali.kra.service.UnitAuthorizationService; import org.kuali.kra.service.VersionHistoryService; import org.kuali.rice.core.api.CoreApiServiceLocator; import org.kuali.rice.coreservice.framework.parameter.ParameterService; import org.kuali.rice.kew.api.exception.WorkflowException; import org.kuali.rice.kns.authorization.AuthorizationConstants; import org.kuali.rice.krad.bo.BusinessObject; import org.kuali.rice.krad.bo.PersistableBusinessObject; import org.kuali.rice.krad.service.BusinessObjectService; import org.kuali.rice.krad.service.DocumentService; import org.kuali.rice.krad.util.GlobalVariables; import org.kuali.rice.krad.util.ObjectUtils; // TODO : extends PersistenceServiceStructureImplBase is a hack to temporarily resolve get class descriptor. 
public class ProposalDevelopmentServiceImpl implements ProposalDevelopmentService { protected final Log LOG = LogFactory.getLog(AwardSyncServiceImpl.class); private BusinessObjectService businessObjectService; private UnitAuthorizationService unitAuthService; private KraPersistenceStructureService kraPersistenceStructureService; private BudgetService budgetService; private ParameterService parameterService; private DocumentService documentService; private VersionHistoryService versionHistoryService; /** * Sets the ParameterService. * * @param parameterService the parameter service. */ public void setParameterService(ParameterService parameterService) { this.parameterService = parameterService; } /** * This method gets called from the "save" action. It initializes the applicant org. on the first save; it also sets the * performing org. if the user didn't make a selection. * * @param proposalDevelopmentDocument */ public void initializeUnitOrganizationLocation(ProposalDevelopmentDocument proposalDevelopmentDocument) { ProposalSite applicantOrganization = proposalDevelopmentDocument.getDevelopmentProposal().getApplicantOrganization(); DevelopmentProposal developmentProposal = proposalDevelopmentDocument.getDevelopmentProposal(); // Unit number chosen, set Applicant Organization if (developmentProposal.getOwnedByUnitNumber() != null && applicantOrganization.getOrganization() == null) { // get Lead Unit details developmentProposal.refreshReferenceObject("ownedByUnit"); String applicantOrganizationId = developmentProposal.getOwnedByUnit().getOrganizationId(); // get Organzation assoc. 
w/ Lead Unit, set applicant org applicantOrganization = createProposalSite(applicantOrganizationId, getNextSiteNumber(proposalDevelopmentDocument)); developmentProposal.setApplicantOrganization(applicantOrganization); } // On first save, set Performing Organization if not selected ProposalSite performingOrganization = developmentProposal.getPerformingOrganization(); if (StringUtils.isEmpty(developmentProposal.getProposalNumber()) && performingOrganization.getOrganization() == null && developmentProposal.getOwnedByUnitNumber() != null) { String performingOrganizationId = developmentProposal.getOwnedByUnit().getOrganizationId(); performingOrganization = createProposalSite(performingOrganizationId, getNextSiteNumber(proposalDevelopmentDocument)); developmentProposal.setPerformingOrganization(performingOrganization); } } /** * Constructs a ProposalSite; initializes the organization, and locationName fields, and sets the default district if there is * one defined for the Organization. * * @param organizationId */ protected ProposalSite createProposalSite(String organizationId, int siteNumber) { ProposalSite proposalSite = new ProposalSite(); proposalSite.setOrganizationId(organizationId); proposalSite.refreshReferenceObject("organization"); proposalSite.setLocationName(proposalSite.getOrganization().getOrganizationName()); proposalSite.initializeDefaultCongressionalDistrict(); return proposalSite; } protected int getNextSiteNumber(ProposalDevelopmentDocument proposalDevelopmentDocument) { return proposalDevelopmentDocument.getDocumentNextValue(Constants.PROPOSAL_LOCATION_SEQUENCE_NUMBER); } // see interface for Javadoc public void initializeProposalSiteNumbers(ProposalDevelopmentDocument proposalDevelopmentDocument) { for (ProposalSite proposalSite : proposalDevelopmentDocument.getDevelopmentProposal().getProposalSites()) if (proposalSite.getSiteNumber() == null) { proposalSite.setSiteNumber(getNextSiteNumber(proposalDevelopmentDocument)); } } public List<Unit> 
getDefaultModifyProposalUnitsForUser(String userId) { return unitAuthService.getUnits(userId, Constants.MODULE_NAMESPACE_PROPOSAL_DEVELOPMENT, PermissionConstants.CREATE_PROPOSAL); } /** * Gets units for the given names. Useful when you know what you want. * * @param unitNumbers varargs representation of unitNumber array * @return Collection<Unit> */ protected Collection<Unit> getUnitsWithNumbers(String... unitNumbers) { Collection<Unit> retval = new ArrayList<Unit>(); for (String unitNumber : unitNumbers) { Map<String, String> query_map = new HashMap<String, String>(); query_map.put("unitNumber", unitNumber); retval.add((Unit) getBusinessObjectService().findByPrimaryKey(Unit.class, query_map)); } return retval; } /** * Accessor for <code>{@link BusinessObjectService}</code> * * @param bos BusinessObjectService */ public void setBusinessObjectService(BusinessObjectService bos) { businessObjectService = bos; } /** * Accessor for <code>{@link BusinessObjectService}</code> * * @return BusinessObjectService */ public BusinessObjectService getBusinessObjectService() { return businessObjectService; } /** * Set the Unit Authorization Service. Injected by Spring. 
* * @param unitAuthService */ public void setUnitAuthorizationService(UnitAuthorizationService unitAuthService) { this.unitAuthService = unitAuthService; } public String populateProposalEditableFieldMetaDataForAjaxCall(String proposalNumber, String editableFieldDBColumn) { if (isAuthorizedToAccess(proposalNumber)) { if (StringUtils.isNotBlank(proposalNumber) && proposalNumber.contains(Constants.COLON)) { proposalNumber = StringUtils.split(proposalNumber, Constants.COLON)[0]; } return populateProposalEditableFieldMetaData(proposalNumber, editableFieldDBColumn); } return StringUtils.EMPTY; } public String populateBudgetEditableFieldMetaDataForAjaxCall(String proposalNumber, String documentNumber, String editableFieldDBColumn) { if (isAuthorizedToAccess(proposalNumber)) { return populateBudgetEditableFieldMetaData(documentNumber, editableFieldDBColumn); } return StringUtils.EMPTY; } protected ProposalOverview getProposalOverview(String proposalNumber) { Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("proposalNumber", proposalNumber); ProposalOverview currentProposal = (ProposalOverview) businessObjectService.findByPrimaryKey(ProposalOverview.class, primaryKeys); return currentProposal; } protected BudgetVersionOverview getBudgetVersionOverview(String documentNumber) { BudgetVersionOverview currentBudget=null; Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("documentNumber", documentNumber); Collection<BudgetVersionOverview> currentBudgets = businessObjectService.findMatching(BudgetVersionOverview.class, primaryKeys); for (BudgetVersionOverview budgetVersionOverview:currentBudgets) { if (budgetVersionOverview.isFinalVersionFlag()) { currentBudget = budgetVersionOverview; break; } } return currentBudget; } protected String getLookupDisplayValue(String lookupClassName, String value, String displayAttributeName) { Map<String, Object> primaryKeys = new HashMap<String, Object>(); List<String> 
lookupClassPkFields = null; Class lookupClass = null; String displayValue = ""; String returnValue = ""; PersistableBusinessObject businessObject = null; if (StringUtils.isNotEmpty(lookupClassName)) { try { lookupClass = Class.forName(lookupClassName); lookupClassPkFields = (List<String>) kraPersistenceStructureService.getPrimaryKeys(lookupClass); } catch (ClassNotFoundException e) { } if (CollectionUtils.isNotEmpty(lookupClassPkFields)) { returnValue = StringUtils.isNotEmpty(lookupClassPkFields.get(0)) ? lookupClassPkFields.get(0) : ""; if (StringUtils.isNotEmpty(value)) { primaryKeys.put(lookupClassPkFields.get(0), value); businessObject = (PersistableBusinessObject) businessObjectService.findByPrimaryKey(lookupClass, primaryKeys); if (businessObject != null) { displayValue = getPropertyValue(businessObject, displayAttributeName); displayValue = StringUtils.isNotEmpty(displayValue) ? displayValue : ""; } } } } return returnValue + "," + displayAttributeName + "," + displayValue; } public String getDataOverrideLookupDisplayReturnValue(String lookupClassName) { List<String> lookupClassPkFields = null; String returnValue = ""; Class lookupClass = null; if (StringUtils.isNotEmpty(lookupClassName)) { try { lookupClass = Class.forName(lookupClassName); lookupClassPkFields = (List<String>) kraPersistenceStructureService.getPrimaryKeys(lookupClass); } catch (ClassNotFoundException e) { } if (CollectionUtils.isNotEmpty(lookupClassPkFields)) { returnValue = StringUtils.isNotEmpty(lookupClassPkFields.get(0)) ? 
lookupClassPkFields.get(0) : ""; } } return returnValue; } public String getDataOverrideLookupDisplayDisplayValue(String lookupClassName, String value, String displayAttributeName) { Map<String, Object> primaryKeys = new HashMap<String, Object>(); List<String> lookupClassPkFields = null; Class lookupClass = null; String displayValue = ""; PersistableBusinessObject businessObject = null; if (StringUtils.isNotEmpty(lookupClassName)) { try { lookupClass = Class.forName(lookupClassName); lookupClassPkFields = (List<String>) kraPersistenceStructureService.getPrimaryKeys(lookupClass); } catch (ClassNotFoundException e) { } if (CollectionUtils.isNotEmpty(lookupClassPkFields)) { if (StringUtils.isNotEmpty(value)) { primaryKeys.put(lookupClassPkFields.get(0), value); businessObject = (PersistableBusinessObject) businessObjectService.findByPrimaryKey(lookupClass, primaryKeys); if (businessObject != null) { displayValue = getPropertyValue(businessObject, displayAttributeName); displayValue = StringUtils.isNotEmpty(displayValue) ? 
displayValue : ""; } } } } return displayValue; } protected String getPropertyValue(BusinessObject businessObject, String fieldName) { String displayValue = ""; try { displayValue = (String) ObjectUtils.getPropertyValue(businessObject, fieldName); } // Might happen due to Unknown Property Exception catch (RuntimeException e) { } return displayValue; } public Object getProposalFieldValueFromDBColumnName(String proposalNumber, String dbColumnName) { Object fieldValue = null; Map<String, String> fieldMap = kraPersistenceStructureService.getDBColumnToObjectAttributeMap(ProposalOverview.class); String proposalAttributeName = fieldMap.get(dbColumnName); if (StringUtils.isNotEmpty(proposalAttributeName)) { ProposalOverview currentProposal = getProposalOverview(proposalNumber); if (currentProposal != null) { fieldValue = ObjectUtils.getPropertyValue(currentProposal, proposalAttributeName); } } return fieldValue; } public Object getBudgetFieldValueFromDBColumnName(String documentNumber, String dbColumnName) { Object fieldValue = null; Map<String, String> fieldMap = kraPersistenceStructureService.getDBColumnToObjectAttributeMap(BudgetVersionOverview.class); String budgetAttributeName = fieldMap.get(dbColumnName); if (StringUtils.isNotEmpty(budgetAttributeName)) { BudgetVersionOverview currentBudget = getBudgetVersionOverview(documentNumber); if (currentBudget != null) { fieldValue = ObjectUtils.getPropertyValue(currentBudget, budgetAttributeName); } } return fieldValue; } protected String populateProposalEditableFieldMetaData(String proposalNumber, String editableFieldDBColumn) { String returnValue = ""; if (GlobalVariables.getMessageMap() != null) { GlobalVariables.getMessageMap().clearErrorMessages(); } Object fieldValue = getProposalFieldValueFromDBColumnName(proposalNumber, editableFieldDBColumn); Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("columnName", editableFieldDBColumn); ProposalColumnsToAlter editableColumn = 
(ProposalColumnsToAlter) businessObjectService.findByPrimaryKey( ProposalColumnsToAlter.class, primaryKeys); if (editableColumn.getHasLookup()) { returnValue = getDataOverrideLookupDisplayReturnValue(editableColumn.getLookupClass()) + "," + editableColumn.getLookupReturn() + "," + getDataOverrideLookupDisplayDisplayValue(editableColumn.getLookupClass(), (fieldValue != null ? fieldValue.toString() : ""), editableColumn.getLookupReturn()); } else if (fieldValue != null && editableColumn.getDataType().equalsIgnoreCase("DATE")) { returnValue = ",," + CoreApiServiceLocator.getDateTimeService().toString((Date) fieldValue, "MM/dd/yyyy"); } else if (fieldValue != null) { returnValue = ",," + fieldValue.toString(); } else { returnValue = ",,"; } returnValue += "," + editableColumn.getDataType(); returnValue += "," + editableColumn.getHasLookup(); returnValue += "," + editableColumn.getLookupClass(); return returnValue; } @SuppressWarnings("unchecked") public Award getProposalCurrentAwardVersion(ProposalDevelopmentDocument proposal) { String awardNumber = proposal.getDevelopmentProposal().getCurrentAwardNumber(); VersionHistory vh = versionHistoryService.findActiveVersion(Award.class, awardNumber); Award award = null; if (vh != null) { award = (Award) vh.getSequenceOwner(); } else { HashMap<String, String> valueMap = new HashMap<String, String>(); valueMap.put("awardNumber", awardNumber); List<Award> awards = (List<Award>) businessObjectService.findMatching(Award.class, valueMap); if (awards != null && !awards.isEmpty()) { award = awards.get(0); } } return award; } public InstitutionalProposal getProposalContinuedFromVersion(ProposalDevelopmentDocument proposal) { String proposalNumber = proposal.getDevelopmentProposal().getContinuedFrom(); VersionHistory vh = versionHistoryService.findActiveVersion(InstitutionalProposal.class, proposalNumber); InstitutionalProposal ip = null; if (vh != null) { ip = (InstitutionalProposal) vh.getSequenceOwner(); } else if 
(StringUtils.isNotEmpty(proposalNumber)) { HashMap<String, String> valueMap = new HashMap<String, String>(); valueMap.put("proposalNumber", proposalNumber); List<InstitutionalProposal> proposals = (List<InstitutionalProposal>) businessObjectService.findMatching( InstitutionalProposal.class, valueMap); if (proposals != null && !proposals.isEmpty()) { ip = proposals.get(0); } } return ip; } public KraPersistenceStructureService getKraPersistenceStructureService() { return kraPersistenceStructureService; } public void setKraPersistenceStructureService(KraPersistenceStructureService kraPersistenceStructureService) { this.kraPersistenceStructureService = kraPersistenceStructureService; } /** * Retrieve injected <code>{@link BudgetService}</code> singleton * * @return BudgetService */ public BudgetService getBudgetService() { return budgetService; } /** * Inject <code>{@link BudgetService}</code> singleton * * @return budgetService to assign */ public void setBudgetService(BudgetService budgetService) { this.budgetService = budgetService; } public void setVersionHistoryService(VersionHistoryService versionHistoryService) { this.versionHistoryService = versionHistoryService; } public boolean isGrantsGovEnabledForProposal(DevelopmentProposal devProposal) { return !devProposal.isChild() && devProposal.getSponsor() != null && StringUtils.equals(devProposal.getSponsor().getSponsorTypeCode(), "0"); } public boolean isGrantsGovEnabledOnSponsorChange(String proposalNumber, String sponsorCode) { DevelopmentProposal proposal = (DevelopmentProposal) getBusinessObjectService().findBySinglePrimaryKey( DevelopmentProposal.class, proposalNumber); Sponsor sponsor = (Sponsor) getBusinessObjectService().findBySinglePrimaryKey(Sponsor.class, sponsorCode); boolean enableGrantsGov = proposal == null || !proposal.isChild(); enableGrantsGov &= sponsor != null && StringUtils.equals(sponsor.getSponsorTypeCode(), "0"); return enableGrantsGov; } /** * * @see 
org.kuali.kra.proposaldevelopment.service.ProposalDevelopmentService#deleteProposal(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument) */ public void deleteProposal(ProposalDevelopmentDocument proposalDocument) throws WorkflowException { ListIterator<BudgetDocumentVersion> iter = proposalDocument.getBudgetDocumentVersions().listIterator(); while (iter.hasNext()) { BudgetDocumentVersion budgetVersion = iter.next(); deleteProposalBudget(budgetVersion.getDocumentNumber(), proposalDocument); iter.remove(); } // remove budget statuses as they are not referenced via ojb, but there is a // database constraint that requires removing these first Map<String, Object> keyValues = new HashMap<String, Object>(); keyValues.put("proposalNumber", proposalDocument.getDevelopmentProposal().getProposalNumber()); getBusinessObjectService().deleteMatching(ProposalBudgetStatus.class, keyValues); proposalDocument.getDevelopmentProposalList().clear(); proposalDocument.getBudgetDocumentVersions().clear(); proposalDocument.setProposalDeleted(true); // because the devproplist was cleared above the dev prop and associated BOs will be // deleted upon save getBusinessObjectService().save(proposalDocument); getDocumentService().cancelDocument(proposalDocument, "Delete Proposal"); } protected void deleteProposalBudget(String budgetDocumentNumber, ProposalDevelopmentDocument parentDocument) { try { BudgetDocument document = (BudgetDocument) getDocumentService().getByDocumentHeaderId(budgetDocumentNumber); document.getBudgets().clear(); // make sure the budget points to this instance of the pdd as other deleted budgets // must be removed so they don't fail document validation. 
document.setParentDocument(parentDocument); document.setBudgetDeleted(true); getDocumentService().saveDocument(document); } catch (WorkflowException e) { LOG.warn("Error getting budget document to delete", e); } } protected DocumentService getDocumentService() { return documentService; } public void setDocumentService(DocumentService documentService) { this.documentService = documentService; } /* * a utility method to check if dwr/ajax call really has authorization 'updateProtocolFundingSource' also accessed by non ajax * call */ private boolean isAuthorizedToAccess(String proposalNumber) { boolean isAuthorized = true; if (proposalNumber.contains(Constants.COLON)) { if (GlobalVariables.getUserSession() != null) { // TODO : this is a quick hack for KC 3.1.1 to provide authorization check for dwr/ajax call. dwr/ajax will be // replaced by // jquery/ajax in rice 2.0 String[] invalues = StringUtils.split(proposalNumber, Constants.COLON); String docFormKey = invalues[1]; if (StringUtils.isBlank(docFormKey)) { isAuthorized = false; } else { Object formObj = GlobalVariables.getUserSession().retrieveObject(docFormKey); if (formObj == null || !(formObj instanceof ProposalDevelopmentForm)) { isAuthorized = false; } else { Map<String, String> editModes = ((ProposalDevelopmentForm) formObj).getEditingMode(); isAuthorized = BooleanUtils.toBoolean(editModes.get(AuthorizationConstants.EditMode.FULL_ENTRY)) || BooleanUtils.toBoolean(editModes.get(AuthorizationConstants.EditMode.VIEW_ONLY)) || BooleanUtils.toBoolean(editModes.get("modifyProposal")); } } } else { // TODO : it seemed that tomcat has this issue intermittently ? 
LOG.info("dwr/ajax does not have session "); } } return isAuthorized; } public Budget getFinalBudget(DevelopmentProposal proposal) { List<BudgetDocumentVersion> budgetDocuments = proposal.getProposalDocument().getBudgetDocumentVersions(); Map<String, Object> fieldValues = new HashMap<String, Object>(); Budget budget = null; if (budgetDocuments != null && budgetDocuments.size() > 0) { for (BudgetDocumentVersion budgetDocument : budgetDocuments) { fieldValues.clear(); fieldValues.put("document_number", budgetDocument.getDocumentNumber()); List<Budget> budgets = (List<Budget>) getBusinessObjectService().findMatching(Budget.class, fieldValues); budget = budgets.get(0); // break out if we find the final budget if (budget.getFinalVersionFlag()) { break; } } } return budget; } public List<CoPiInfoDO> getCoPiPiInfo(DevelopmentProposal proposal) { List<ProposalPerson> proposalPersons = proposal.getProposalPersons(); List<CoPiInfoDO> coPiInfos = new ArrayList<CoPiInfoDO>(); for (ProposalPerson proposalPerson : proposalPersons) { if (proposalPerson.getProposalPersonRoleId().equals(CO_INVESTIGATOR_ROLE)) { CoPiInfoDO coPiInfo = new CoPiInfoDO(); coPiInfo.setCoPiUnit(proposalPerson.getHomeUnit()); coPiInfo.setCoPiName(proposalPerson.getFullName()); coPiInfos.add(coPiInfo); } } return coPiInfos; } public List<CostShareInfoDO> getCostShareInfo(Budget budget) { List<BudgetCostShare> costShares = budget.getBudgetCostShares(); List<CostShareInfoDO> costShareInfos = new ArrayList<CostShareInfoDO>(); if (costShares != null && costShares.size() > 0) { for (BudgetCostShare costShare : costShares) { if (!Constants.THIRD_PARTY_UNIT_NO.equals(costShare.getSourceUnit())) { CostShareInfoDO costShareInfo = new CostShareInfoDO(); costShareInfo.setCostShareUnit(costShare.getSourceUnit()); costShareInfo.setCostShareAmount(costShare.getShareAmount()); costShareInfos.add(costShareInfo); } } } return costShareInfos; } /** * Return the institutional proposal linked to the development proposal. 
* * @param proposalDevelopmentDocument * @param instProposalNumber * @return */ public InstitutionalProposal getInstitutionalProposal(String devProposalNumber) { Long instProposalId = null; Map<String, Object> values = new HashMap<String, Object>(); values.put("devProposalNumber", devProposalNumber); Collection<ProposalAdminDetails> proposalAdminDetails = businessObjectService.findMatching(ProposalAdminDetails.class, values); for (Iterator iter = proposalAdminDetails.iterator(); iter.hasNext();) { ProposalAdminDetails pad = (ProposalAdminDetails) iter.next(); pad.refreshReferenceObject("institutionalProposal"); return pad.getInstitutionalProposal(); } return null; } protected String populateBudgetEditableFieldMetaData( String documentNumber, String editableFieldDBColumn) { String returnValue = ""; //BudgetDocument budgetDocument = null; if (GlobalVariables.getMessageMap() != null) { GlobalVariables.getMessageMap().clearErrorMessages(); } Object fieldValue = getBudgetFieldValueFromDBColumnName(documentNumber, editableFieldDBColumn); Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("columnName", editableFieldDBColumn); BudgetColumnsToAlter editableColumn = (BudgetColumnsToAlter) businessObjectService.findByPrimaryKey( BudgetColumnsToAlter.class, primaryKeys); if (editableColumn.getHasLookup()) { returnValue = getDataOverrideLookupDisplayReturnValue(editableColumn.getLookupClass()) + "," + editableColumn.getLookupReturn() + "," + getDataOverrideLookupDisplayDisplayValue(editableColumn.getLookupClass(), (fieldValue != null ? 
fieldValue.toString() : ""), editableColumn.getLookupReturn()); } else if (fieldValue != null && editableColumn.getDataType().equalsIgnoreCase("DATE")) { returnValue = ",," + CoreApiServiceLocator.getDateTimeService().toString((Date) fieldValue, "MM/dd/yyyy"); } else if (fieldValue != null) { returnValue = ",," + fieldValue.toString(); } else { returnValue = ",,"; } if (fieldValue instanceof Boolean) { editableColumn.setDataType("boolean"); } returnValue += "," + editableColumn.getDataType(); returnValue += "," + editableColumn.getHasLookup(); returnValue += "," + editableColumn.getLookupClass(); return returnValue; } }
src/main/java/org/kuali/kra/proposaldevelopment/service/impl/ProposalDevelopmentServiceImpl.java
/* * Copyright 2005-2010 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.proposaldevelopment.service.impl; import static org.kuali.kra.infrastructure.Constants.CO_INVESTIGATOR_ROLE; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.BooleanUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hamcrest.core.IsInstanceOf; import org.kuali.kra.award.awardhierarchy.sync.service.AwardSyncServiceImpl; import org.kuali.kra.award.home.Award; import org.kuali.kra.bo.Sponsor; import org.kuali.kra.bo.Unit; import org.kuali.kra.bo.versioning.VersionHistory; import org.kuali.kra.budget.core.Budget; import org.kuali.kra.budget.core.BudgetService; import org.kuali.kra.budget.distributionincome.BudgetCostShare; import org.kuali.kra.budget.document.BudgetDocument; import org.kuali.kra.budget.versions.BudgetDocumentVersion; import org.kuali.kra.budget.versions.BudgetVersionOverview; import org.kuali.kra.infrastructure.Constants; import org.kuali.kra.infrastructure.KraServiceLocator; import org.kuali.kra.infrastructure.PermissionConstants; import org.kuali.kra.institutionalproposal.home.InstitutionalProposal; 
import org.kuali.kra.institutionalproposal.proposaladmindetails.ProposalAdminDetails; import org.kuali.kra.proposaldevelopment.bo.CoPiInfoDO; import org.kuali.kra.proposaldevelopment.bo.CostShareInfoDO; import org.kuali.kra.proposaldevelopment.bo.DevelopmentProposal; import org.kuali.kra.proposaldevelopment.bo.ProposalBudgetStatus; import org.kuali.kra.proposaldevelopment.bo.ProposalColumnsToAlter; import org.kuali.kra.proposaldevelopment.bo.ProposalOverview; import org.kuali.kra.proposaldevelopment.bo.ProposalPerson; import org.kuali.kra.proposaldevelopment.bo.ProposalSite; import org.kuali.kra.proposaldevelopment.budget.bo.BudgetColumnsToAlter; import org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument; import org.kuali.kra.proposaldevelopment.service.ProposalDevelopmentService; import org.kuali.kra.proposaldevelopment.web.struts.form.ProposalDevelopmentForm; import org.kuali.kra.service.KraPersistenceStructureService; import org.kuali.kra.service.UnitAuthorizationService; import org.kuali.kra.service.VersionHistoryService; import org.kuali.rice.core.api.CoreApiServiceLocator; import org.kuali.rice.coreservice.framework.parameter.ParameterService; import org.kuali.rice.kew.api.exception.WorkflowException; import org.kuali.rice.kns.authorization.AuthorizationConstants; import org.kuali.rice.kns.service.DataDictionaryService; import org.kuali.rice.krad.bo.BusinessObject; import org.kuali.rice.krad.bo.PersistableBusinessObject; import org.kuali.rice.krad.datadictionary.AttributeDefinition; import org.kuali.rice.krad.service.BusinessObjectService; import org.kuali.rice.krad.service.DocumentService; import org.kuali.rice.krad.util.GlobalVariables; import org.kuali.rice.krad.util.ObjectUtils; import com.sun.org.apache.bcel.internal.generic.NEW; // TODO : extends PersistenceServiceStructureImplBase is a hack to temporarily resolve get class descriptor. 
public class ProposalDevelopmentServiceImpl implements ProposalDevelopmentService { protected final Log LOG = LogFactory.getLog(AwardSyncServiceImpl.class); private BusinessObjectService businessObjectService; private UnitAuthorizationService unitAuthService; private KraPersistenceStructureService kraPersistenceStructureService; private BudgetService budgetService; private ParameterService parameterService; private DocumentService documentService; private VersionHistoryService versionHistoryService; /** * Sets the ParameterService. * * @param parameterService the parameter service. */ public void setParameterService(ParameterService parameterService) { this.parameterService = parameterService; } /** * This method gets called from the "save" action. It initializes the applicant org. on the first save; it also sets the * performing org. if the user didn't make a selection. * * @param proposalDevelopmentDocument */ public void initializeUnitOrganizationLocation(ProposalDevelopmentDocument proposalDevelopmentDocument) { ProposalSite applicantOrganization = proposalDevelopmentDocument.getDevelopmentProposal().getApplicantOrganization(); DevelopmentProposal developmentProposal = proposalDevelopmentDocument.getDevelopmentProposal(); // Unit number chosen, set Applicant Organization if (developmentProposal.getOwnedByUnitNumber() != null && applicantOrganization.getOrganization() == null) { // get Lead Unit details developmentProposal.refreshReferenceObject("ownedByUnit"); String applicantOrganizationId = developmentProposal.getOwnedByUnit().getOrganizationId(); // get Organzation assoc. 
w/ Lead Unit, set applicant org applicantOrganization = createProposalSite(applicantOrganizationId, getNextSiteNumber(proposalDevelopmentDocument)); developmentProposal.setApplicantOrganization(applicantOrganization); } // On first save, set Performing Organization if not selected ProposalSite performingOrganization = developmentProposal.getPerformingOrganization(); if (StringUtils.isEmpty(developmentProposal.getProposalNumber()) && performingOrganization.getOrganization() == null && developmentProposal.getOwnedByUnitNumber() != null) { String performingOrganizationId = developmentProposal.getOwnedByUnit().getOrganizationId(); performingOrganization = createProposalSite(performingOrganizationId, getNextSiteNumber(proposalDevelopmentDocument)); developmentProposal.setPerformingOrganization(performingOrganization); } } /** * Constructs a ProposalSite; initializes the organization, and locationName fields, and sets the default district if there is * one defined for the Organization. * * @param organizationId */ protected ProposalSite createProposalSite(String organizationId, int siteNumber) { ProposalSite proposalSite = new ProposalSite(); proposalSite.setOrganizationId(organizationId); proposalSite.refreshReferenceObject("organization"); proposalSite.setLocationName(proposalSite.getOrganization().getOrganizationName()); proposalSite.initializeDefaultCongressionalDistrict(); return proposalSite; } protected int getNextSiteNumber(ProposalDevelopmentDocument proposalDevelopmentDocument) { return proposalDevelopmentDocument.getDocumentNextValue(Constants.PROPOSAL_LOCATION_SEQUENCE_NUMBER); } // see interface for Javadoc public void initializeProposalSiteNumbers(ProposalDevelopmentDocument proposalDevelopmentDocument) { for (ProposalSite proposalSite : proposalDevelopmentDocument.getDevelopmentProposal().getProposalSites()) if (proposalSite.getSiteNumber() == null) { proposalSite.setSiteNumber(getNextSiteNumber(proposalDevelopmentDocument)); } } public List<Unit> 
getDefaultModifyProposalUnitsForUser(String userId) { return unitAuthService.getUnits(userId, Constants.MODULE_NAMESPACE_PROPOSAL_DEVELOPMENT, PermissionConstants.CREATE_PROPOSAL); } /** * Gets units for the given names. Useful when you know what you want. * * @param unitNumbers varargs representation of unitNumber array * @return Collection<Unit> */ protected Collection<Unit> getUnitsWithNumbers(String... unitNumbers) { Collection<Unit> retval = new ArrayList<Unit>(); for (String unitNumber : unitNumbers) { Map<String, String> query_map = new HashMap<String, String>(); query_map.put("unitNumber", unitNumber); retval.add((Unit) getBusinessObjectService().findByPrimaryKey(Unit.class, query_map)); } return retval; } /** * Accessor for <code>{@link BusinessObjectService}</code> * * @param bos BusinessObjectService */ public void setBusinessObjectService(BusinessObjectService bos) { businessObjectService = bos; } /** * Accessor for <code>{@link BusinessObjectService}</code> * * @return BusinessObjectService */ public BusinessObjectService getBusinessObjectService() { return businessObjectService; } /** * Set the Unit Authorization Service. Injected by Spring. 
* * @param unitAuthService */ public void setUnitAuthorizationService(UnitAuthorizationService unitAuthService) { this.unitAuthService = unitAuthService; } public String populateProposalEditableFieldMetaDataForAjaxCall(String proposalNumber, String editableFieldDBColumn) { if (isAuthorizedToAccess(proposalNumber)) { if (StringUtils.isNotBlank(proposalNumber) && proposalNumber.contains(Constants.COLON)) { proposalNumber = StringUtils.split(proposalNumber, Constants.COLON)[0]; } return populateProposalEditableFieldMetaData(proposalNumber, editableFieldDBColumn); } return StringUtils.EMPTY; } public String populateBudgetEditableFieldMetaDataForAjaxCall(String proposalNumber, String documentNumber, String editableFieldDBColumn) { if (isAuthorizedToAccess(proposalNumber)) { return populateBudgetEditableFieldMetaData(documentNumber, editableFieldDBColumn); } return StringUtils.EMPTY; } protected ProposalOverview getProposalOverview(String proposalNumber) { Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("proposalNumber", proposalNumber); ProposalOverview currentProposal = (ProposalOverview) businessObjectService.findByPrimaryKey(ProposalOverview.class, primaryKeys); return currentProposal; } protected BudgetVersionOverview getBudgetVersionOverview(String documentNumber) { BudgetVersionOverview currentBudget=null; Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("documentNumber", documentNumber); Collection<BudgetVersionOverview> currentBudgets = businessObjectService.findMatching(BudgetVersionOverview.class, primaryKeys); for (BudgetVersionOverview budgetVersionOverview:currentBudgets) { if (budgetVersionOverview.isFinalVersionFlag()) { currentBudget = budgetVersionOverview; break; } } return currentBudget; } protected String getLookupDisplayValue(String lookupClassName, String value, String displayAttributeName) { Map<String, Object> primaryKeys = new HashMap<String, Object>(); List<String> 
lookupClassPkFields = null; Class lookupClass = null; String displayValue = ""; String returnValue = ""; PersistableBusinessObject businessObject = null; if (StringUtils.isNotEmpty(lookupClassName)) { try { lookupClass = Class.forName(lookupClassName); lookupClassPkFields = (List<String>) kraPersistenceStructureService.getPrimaryKeys(lookupClass); } catch (ClassNotFoundException e) { } if (CollectionUtils.isNotEmpty(lookupClassPkFields)) { returnValue = StringUtils.isNotEmpty(lookupClassPkFields.get(0)) ? lookupClassPkFields.get(0) : ""; if (StringUtils.isNotEmpty(value)) { primaryKeys.put(lookupClassPkFields.get(0), value); businessObject = (PersistableBusinessObject) businessObjectService.findByPrimaryKey(lookupClass, primaryKeys); if (businessObject != null) { displayValue = getPropertyValue(businessObject, displayAttributeName); displayValue = StringUtils.isNotEmpty(displayValue) ? displayValue : ""; } } } } return returnValue + "," + displayAttributeName + "," + displayValue; } public String getDataOverrideLookupDisplayReturnValue(String lookupClassName) { List<String> lookupClassPkFields = null; String returnValue = ""; Class lookupClass = null; if (StringUtils.isNotEmpty(lookupClassName)) { try { lookupClass = Class.forName(lookupClassName); lookupClassPkFields = (List<String>) kraPersistenceStructureService.getPrimaryKeys(lookupClass); } catch (ClassNotFoundException e) { } if (CollectionUtils.isNotEmpty(lookupClassPkFields)) { returnValue = StringUtils.isNotEmpty(lookupClassPkFields.get(0)) ? 
lookupClassPkFields.get(0) : ""; } } return returnValue; } public String getDataOverrideLookupDisplayDisplayValue(String lookupClassName, String value, String displayAttributeName) { Map<String, Object> primaryKeys = new HashMap<String, Object>(); List<String> lookupClassPkFields = null; Class lookupClass = null; String displayValue = ""; PersistableBusinessObject businessObject = null; if (StringUtils.isNotEmpty(lookupClassName)) { try { lookupClass = Class.forName(lookupClassName); lookupClassPkFields = (List<String>) kraPersistenceStructureService.getPrimaryKeys(lookupClass); } catch (ClassNotFoundException e) { } if (CollectionUtils.isNotEmpty(lookupClassPkFields)) { if (StringUtils.isNotEmpty(value)) { primaryKeys.put(lookupClassPkFields.get(0), value); businessObject = (PersistableBusinessObject) businessObjectService.findByPrimaryKey(lookupClass, primaryKeys); if (businessObject != null) { displayValue = getPropertyValue(businessObject, displayAttributeName); displayValue = StringUtils.isNotEmpty(displayValue) ? 
displayValue : ""; } } } } return displayValue; } protected String getPropertyValue(BusinessObject businessObject, String fieldName) { String displayValue = ""; try { displayValue = (String) ObjectUtils.getPropertyValue(businessObject, fieldName); } // Might happen due to Unknown Property Exception catch (RuntimeException e) { } return displayValue; } public Object getProposalFieldValueFromDBColumnName(String proposalNumber, String dbColumnName) { Object fieldValue = null; Map<String, String> fieldMap = kraPersistenceStructureService.getDBColumnToObjectAttributeMap(ProposalOverview.class); String proposalAttributeName = fieldMap.get(dbColumnName); if (StringUtils.isNotEmpty(proposalAttributeName)) { ProposalOverview currentProposal = getProposalOverview(proposalNumber); if (currentProposal != null) { fieldValue = ObjectUtils.getPropertyValue(currentProposal, proposalAttributeName); } } return fieldValue; } public Object getBudgetFieldValueFromDBColumnName(String documentNumber, String dbColumnName) { Object fieldValue = null; Map<String, String> fieldMap = kraPersistenceStructureService.getDBColumnToObjectAttributeMap(BudgetVersionOverview.class); String budgetAttributeName = fieldMap.get(dbColumnName); if (StringUtils.isNotEmpty(budgetAttributeName)) { BudgetVersionOverview currentBudget = getBudgetVersionOverview(documentNumber); if (currentBudget != null) { fieldValue = ObjectUtils.getPropertyValue(currentBudget, budgetAttributeName); } } return fieldValue; } protected String populateProposalEditableFieldMetaData(String proposalNumber, String editableFieldDBColumn) { String returnValue = ""; if (GlobalVariables.getMessageMap() != null) { GlobalVariables.getMessageMap().clearErrorMessages(); } Object fieldValue = getProposalFieldValueFromDBColumnName(proposalNumber, editableFieldDBColumn); Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("columnName", editableFieldDBColumn); ProposalColumnsToAlter editableColumn = 
(ProposalColumnsToAlter) businessObjectService.findByPrimaryKey( ProposalColumnsToAlter.class, primaryKeys); if (editableColumn.getHasLookup()) { returnValue = getDataOverrideLookupDisplayReturnValue(editableColumn.getLookupClass()) + "," + editableColumn.getLookupReturn() + "," + getDataOverrideLookupDisplayDisplayValue(editableColumn.getLookupClass(), (fieldValue != null ? fieldValue.toString() : ""), editableColumn.getLookupReturn()); } else if (fieldValue != null && editableColumn.getDataType().equalsIgnoreCase("DATE")) { returnValue = ",," + CoreApiServiceLocator.getDateTimeService().toString((Date) fieldValue, "MM/dd/yyyy"); } else if (fieldValue != null) { returnValue = ",," + fieldValue.toString(); } else { returnValue = ",,"; } returnValue += "," + editableColumn.getDataType(); returnValue += "," + editableColumn.getHasLookup(); returnValue += "," + editableColumn.getLookupClass(); return returnValue; } @SuppressWarnings("unchecked") public Award getProposalCurrentAwardVersion(ProposalDevelopmentDocument proposal) { String awardNumber = proposal.getDevelopmentProposal().getCurrentAwardNumber(); VersionHistory vh = versionHistoryService.findActiveVersion(Award.class, awardNumber); Award award = null; if (vh != null) { award = (Award) vh.getSequenceOwner(); } else { HashMap<String, String> valueMap = new HashMap<String, String>(); valueMap.put("awardNumber", awardNumber); List<Award> awards = (List<Award>) businessObjectService.findMatching(Award.class, valueMap); if (awards != null && !awards.isEmpty()) { award = awards.get(0); } } return award; } public InstitutionalProposal getProposalContinuedFromVersion(ProposalDevelopmentDocument proposal) { String proposalNumber = proposal.getDevelopmentProposal().getContinuedFrom(); VersionHistory vh = versionHistoryService.findActiveVersion(InstitutionalProposal.class, proposalNumber); InstitutionalProposal ip = null; if (vh != null) { ip = (InstitutionalProposal) vh.getSequenceOwner(); } else if 
(StringUtils.isNotEmpty(proposalNumber)) { HashMap<String, String> valueMap = new HashMap<String, String>(); valueMap.put("proposalNumber", proposalNumber); List<InstitutionalProposal> proposals = (List<InstitutionalProposal>) businessObjectService.findMatching( InstitutionalProposal.class, valueMap); if (proposals != null && !proposals.isEmpty()) { ip = proposals.get(0); } } return ip; } public KraPersistenceStructureService getKraPersistenceStructureService() { return kraPersistenceStructureService; } public void setKraPersistenceStructureService(KraPersistenceStructureService kraPersistenceStructureService) { this.kraPersistenceStructureService = kraPersistenceStructureService; } /** * Retrieve injected <code>{@link BudgetService}</code> singleton * * @return BudgetService */ public BudgetService getBudgetService() { return budgetService; } /** * Inject <code>{@link BudgetService}</code> singleton * * @return budgetService to assign */ public void setBudgetService(BudgetService budgetService) { this.budgetService = budgetService; } public void setVersionHistoryService(VersionHistoryService versionHistoryService) { this.versionHistoryService = versionHistoryService; } public boolean isGrantsGovEnabledForProposal(DevelopmentProposal devProposal) { return !devProposal.isChild() && devProposal.getSponsor() != null && StringUtils.equals(devProposal.getSponsor().getSponsorTypeCode(), "0"); } public boolean isGrantsGovEnabledOnSponsorChange(String proposalNumber, String sponsorCode) { DevelopmentProposal proposal = (DevelopmentProposal) getBusinessObjectService().findBySinglePrimaryKey( DevelopmentProposal.class, proposalNumber); Sponsor sponsor = (Sponsor) getBusinessObjectService().findBySinglePrimaryKey(Sponsor.class, sponsorCode); boolean enableGrantsGov = proposal == null || !proposal.isChild(); enableGrantsGov &= sponsor != null && StringUtils.equals(sponsor.getSponsorTypeCode(), "0"); return enableGrantsGov; } /** * * @see 
org.kuali.kra.proposaldevelopment.service.ProposalDevelopmentService#deleteProposal(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument) */ public void deleteProposal(ProposalDevelopmentDocument proposalDocument) throws WorkflowException { ListIterator<BudgetDocumentVersion> iter = proposalDocument.getBudgetDocumentVersions().listIterator(); while (iter.hasNext()) { BudgetDocumentVersion budgetVersion = iter.next(); deleteProposalBudget(budgetVersion.getDocumentNumber(), proposalDocument); iter.remove(); } // remove budget statuses as they are not referenced via ojb, but there is a // database constraint that requires removing these first Map<String, Object> keyValues = new HashMap<String, Object>(); keyValues.put("proposalNumber", proposalDocument.getDevelopmentProposal().getProposalNumber()); getBusinessObjectService().deleteMatching(ProposalBudgetStatus.class, keyValues); proposalDocument.getDevelopmentProposalList().clear(); proposalDocument.getBudgetDocumentVersions().clear(); proposalDocument.setProposalDeleted(true); // because the devproplist was cleared above the dev prop and associated BOs will be // deleted upon save getBusinessObjectService().save(proposalDocument); getDocumentService().cancelDocument(proposalDocument, "Delete Proposal"); } protected void deleteProposalBudget(String budgetDocumentNumber, ProposalDevelopmentDocument parentDocument) { try { BudgetDocument document = (BudgetDocument) getDocumentService().getByDocumentHeaderId(budgetDocumentNumber); document.getBudgets().clear(); // make sure the budget points to this instance of the pdd as other deleted budgets // must be removed so they don't fail document validation. 
document.setParentDocument(parentDocument); document.setBudgetDeleted(true); getDocumentService().saveDocument(document); } catch (WorkflowException e) { LOG.warn("Error getting budget document to delete", e); } } protected DocumentService getDocumentService() { return documentService; } public void setDocumentService(DocumentService documentService) { this.documentService = documentService; } /* * a utility method to check if dwr/ajax call really has authorization 'updateProtocolFundingSource' also accessed by non ajax * call */ private boolean isAuthorizedToAccess(String proposalNumber) { boolean isAuthorized = true; if (proposalNumber.contains(Constants.COLON)) { if (GlobalVariables.getUserSession() != null) { // TODO : this is a quick hack for KC 3.1.1 to provide authorization check for dwr/ajax call. dwr/ajax will be // replaced by // jquery/ajax in rice 2.0 String[] invalues = StringUtils.split(proposalNumber, Constants.COLON); String docFormKey = invalues[1]; if (StringUtils.isBlank(docFormKey)) { isAuthorized = false; } else { Object formObj = GlobalVariables.getUserSession().retrieveObject(docFormKey); if (formObj == null || !(formObj instanceof ProposalDevelopmentForm)) { isAuthorized = false; } else { Map<String, String> editModes = ((ProposalDevelopmentForm) formObj).getEditingMode(); isAuthorized = BooleanUtils.toBoolean(editModes.get(AuthorizationConstants.EditMode.FULL_ENTRY)) || BooleanUtils.toBoolean(editModes.get(AuthorizationConstants.EditMode.VIEW_ONLY)) || BooleanUtils.toBoolean(editModes.get("modifyProposal")); } } } else { // TODO : it seemed that tomcat has this issue intermittently ? 
LOG.info("dwr/ajax does not have session "); } } return isAuthorized; } public Budget getFinalBudget(DevelopmentProposal proposal) { List<BudgetDocumentVersion> budgetDocuments = proposal.getProposalDocument().getBudgetDocumentVersions(); Map<String, Object> fieldValues = new HashMap<String, Object>(); Budget budget = null; if (budgetDocuments != null && budgetDocuments.size() > 0) { for (BudgetDocumentVersion budgetDocument : budgetDocuments) { fieldValues.clear(); fieldValues.put("document_number", budgetDocument.getDocumentNumber()); List<Budget> budgets = (List<Budget>) getBusinessObjectService().findMatching(Budget.class, fieldValues); budget = budgets.get(0); // break out if we find the final budget if (budget.getFinalVersionFlag()) { break; } } } return budget; } public List<CoPiInfoDO> getCoPiPiInfo(DevelopmentProposal proposal) { List<ProposalPerson> proposalPersons = proposal.getProposalPersons(); List<CoPiInfoDO> coPiInfos = new ArrayList<CoPiInfoDO>(); for (ProposalPerson proposalPerson : proposalPersons) { if (proposalPerson.getProposalPersonRoleId().equals(CO_INVESTIGATOR_ROLE)) { CoPiInfoDO coPiInfo = new CoPiInfoDO(); coPiInfo.setCoPiUnit(proposalPerson.getHomeUnit()); coPiInfo.setCoPiName(proposalPerson.getFullName()); coPiInfos.add(coPiInfo); } } return coPiInfos; } public List<CostShareInfoDO> getCostShareInfo(Budget budget) { List<BudgetCostShare> costShares = budget.getBudgetCostShares(); List<CostShareInfoDO> costShareInfos = new ArrayList<CostShareInfoDO>(); if (costShares != null && costShares.size() > 0) { for (BudgetCostShare costShare : costShares) { if (!Constants.THIRD_PARTY_UNIT_NO.equals(costShare.getSourceUnit())) { CostShareInfoDO costShareInfo = new CostShareInfoDO(); costShareInfo.setCostShareUnit(costShare.getSourceUnit()); costShareInfo.setCostShareAmount(costShare.getShareAmount()); costShareInfos.add(costShareInfo); } } } return costShareInfos; } /** * Return the institutional proposal linked to the development proposal. 
* * @param proposalDevelopmentDocument * @param instProposalNumber * @return */ public InstitutionalProposal getInstitutionalProposal(String devProposalNumber) { Long instProposalId = null; Map<String, Object> values = new HashMap<String, Object>(); values.put("devProposalNumber", devProposalNumber); Collection<ProposalAdminDetails> proposalAdminDetails = businessObjectService.findMatching(ProposalAdminDetails.class, values); for (Iterator iter = proposalAdminDetails.iterator(); iter.hasNext();) { ProposalAdminDetails pad = (ProposalAdminDetails) iter.next(); pad.refreshReferenceObject("institutionalProposal"); return pad.getInstitutionalProposal(); } return null; } protected String populateBudgetEditableFieldMetaData( String documentNumber, String editableFieldDBColumn) { String returnValue = ""; //BudgetDocument budgetDocument = null; if (GlobalVariables.getMessageMap() != null) { GlobalVariables.getMessageMap().clearErrorMessages(); } Object fieldValue = getBudgetFieldValueFromDBColumnName(documentNumber, editableFieldDBColumn); Map<String, Object> primaryKeys = new HashMap<String, Object>(); primaryKeys.put("columnName", editableFieldDBColumn); BudgetColumnsToAlter editableColumn = (BudgetColumnsToAlter) businessObjectService.findByPrimaryKey( BudgetColumnsToAlter.class, primaryKeys); if (editableColumn.getHasLookup()) { returnValue = getDataOverrideLookupDisplayReturnValue(editableColumn.getLookupClass()) + "," + editableColumn.getLookupReturn() + "," + getDataOverrideLookupDisplayDisplayValue(editableColumn.getLookupClass(), (fieldValue != null ? 
fieldValue.toString() : ""), editableColumn.getLookupReturn()); } else if (fieldValue != null && editableColumn.getDataType().equalsIgnoreCase("DATE")) { returnValue = ",," + CoreApiServiceLocator.getDateTimeService().toString((Date) fieldValue, "MM/dd/yyyy"); } else if (fieldValue != null) { returnValue = ",," + fieldValue.toString(); } else { returnValue = ",,"; } if (fieldValue instanceof Boolean) { editableColumn.setDataType("boolean"); } returnValue += "," + editableColumn.getDataType(); returnValue += "," + editableColumn.getHasLookup(); returnValue += "," + editableColumn.getLookupClass(); return returnValue; } }
KRACOEUS-4758
src/main/java/org/kuali/kra/proposaldevelopment/service/impl/ProposalDevelopmentServiceImpl.java
KRACOEUS-4758
Java
lgpl-2.1
7e6c1554c024170031dfedc9a513abb1c45c5103
0
darranl/wildfly-core,JiriOndrusek/wildfly-core,jamezp/wildfly-core,bstansberry/wildfly-core,ivassile/wildfly-core,JiriOndrusek/wildfly-core,aloubyansky/wildfly-core,luck3y/wildfly-core,ivassile/wildfly-core,jamezp/wildfly-core,jfdenise/wildfly-core,yersan/wildfly-core,bstansberry/wildfly-core,jamezp/wildfly-core,aloubyansky/wildfly-core,yersan/wildfly-core,luck3y/wildfly-core,luck3y/wildfly-core,bstansberry/wildfly-core,jfdenise/wildfly-core,jfdenise/wildfly-core,aloubyansky/wildfly-core,JiriOndrusek/wildfly-core,ivassile/wildfly-core,soul2zimate/wildfly-core,yersan/wildfly-core,soul2zimate/wildfly-core,darranl/wildfly-core,darranl/wildfly-core,soul2zimate/wildfly-core
/* * JBoss, Home of Professional Open Source. * Copyright 2012, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
*/ package org.jboss.as.domain.http.server; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ACCESS_MECHANISM; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.COMPOSITE; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.FAILED; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.HOST; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OPERATION_HEADERS; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OUTCOME; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_OPERATION_DESCRIPTION_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_OPERATION_NAMES_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_RESOURCE_DESCRIPTION_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_RESOURCE_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.RESULT; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.SUCCESS; import static org.jboss.as.domain.http.server.DomainUtil.writeResponse; import static org.jboss.as.domain.http.server.logging.HttpServerLogger.ROOT_LOGGER; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.security.MessageDigest; import java.util.ArrayList; import java.util.Deque; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import io.undertow.server.HttpHandler; import io.undertow.server.HttpServerExchange; import io.undertow.util.ETag; import io.undertow.util.ETagUtils; import io.undertow.util.HeaderMap; import 
io.undertow.util.Headers; import io.undertow.util.HexConverter; import io.undertow.util.Methods; import org.jboss.as.controller.ModelController; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.client.OperationBuilder; import org.jboss.as.controller.client.OperationMessageHandler; import org.jboss.as.core.security.AccessMechanism; import org.jboss.as.domain.http.server.logging.HttpServerLogger; import org.jboss.dmr.ModelNode; import org.xnio.IoUtils; import org.xnio.streams.ChannelInputStream; /** * * @author <a href="[email protected]">Kabir Khan</a> */ class DomainApiHandler implements HttpHandler { private static final String JSON_PRETTY = "json.pretty"; /** * Represents all possible management operations that can be executed using HTTP GET. Cacheable operations * have a {@code maxAge} property &gt; 0. */ enum GetOperation { /* * It is essential that the GET requests exposed over the HTTP interface are for read only * operations that do not modify the domain model or update anything server side. 
*/ RESOURCE(READ_RESOURCE_OPERATION, 0), ATTRIBUTE("read-attribute", 0), RESOURCE_DESCRIPTION(READ_RESOURCE_DESCRIPTION_OPERATION, Common.ONE_WEEK), SNAPSHOTS("list-snapshots", 0), OPERATION_DESCRIPTION(READ_OPERATION_DESCRIPTION_OPERATION, Common.ONE_WEEK), OPERATION_NAMES(READ_OPERATION_NAMES_OPERATION, 0); private String realOperation; private int maxAge; GetOperation(String realOperation, int maxAge) { this.realOperation = realOperation; this.maxAge = maxAge; } public String realOperation() { return realOperation; } public int getMaxAge() { return maxAge; } } private final ModelController modelController; DomainApiHandler(ModelController modelController) { this.modelController = modelController; } @Override public void handleRequest(final HttpServerExchange exchange) { final ModelNode dmr; ModelNode response; HeaderMap requestHeaders = exchange.getRequestHeaders(); final boolean cachable; final boolean get = exchange.getRequestMethod().equals(Methods.GET); final boolean encode = Common.APPLICATION_DMR_ENCODED.equals(requestHeaders.getFirst(Headers.ACCEPT)) || Common.APPLICATION_DMR_ENCODED.equals(requestHeaders.getFirst(Headers.CONTENT_TYPE)); final OperationParameter.Builder operationParameterBuilder = new OperationParameter.Builder(get).encode(encode); try { if (get) { GetOperation operation = getOperation(exchange); operationParameterBuilder.maxAge(operation.getMaxAge()); dmr = convertGetRequest(exchange, operation); cachable = operation.getMaxAge() > 0; } else { dmr = convertPostRequest(exchange, encode); cachable = false; } boolean pretty = false; if (dmr.hasDefined(JSON_PRETTY)) { String jsonPretty = dmr.get(JSON_PRETTY).asString(); pretty = "true".equalsIgnoreCase(jsonPretty) || "1".equals(jsonPretty); } operationParameterBuilder.pretty(pretty); } catch (Exception e) { ROOT_LOGGER.debugf("Unable to construct ModelNode '%s'", e.getMessage()); Common.sendError(exchange, false, e.getLocalizedMessage()); return; } final ResponseCallback callback = new 
ResponseCallback() { @Override void doSendResponse(final ModelNode response) { if (response.hasDefined(OUTCOME) && FAILED.equals(response.get(OUTCOME).asString())) { Common.sendError(exchange, encode, response); return; } writeResponse(exchange, 200, response, operationParameterBuilder.build()); } }; final boolean sendPreparedResponse = sendPreparedResponse(dmr); final ModelController.OperationTransactionControl control = sendPreparedResponse ? new ModelController.OperationTransactionControl() { @Override public void operationPrepared(final ModelController.OperationTransaction transaction, final ModelNode result) { transaction.commit(); // Fix prepared result result.get(OUTCOME).set(SUCCESS); result.get(RESULT); callback.sendResponse(result); } } : ModelController.OperationTransactionControl.COMMIT; try { dmr.get(OPERATION_HEADERS, ACCESS_MECHANISM).set(AccessMechanism.HTTP.toString()); response = modelController.execute(dmr, OperationMessageHandler.logging, control, new OperationBuilder(dmr).build()); if (cachable) { // Use the MD5 of the model nodes toString() method as ETag MessageDigest md = MessageDigest.getInstance("MD5"); md.update(response.toString().getBytes()); ETag etag = new ETag(false, HexConverter.convertToHexString(md.digest())); operationParameterBuilder.etag(etag); if (!ETagUtils.handleIfNoneMatch(exchange, etag, false)) { exchange.setResponseCode(304); DomainUtil.writeCacheHeaders(exchange, 304, operationParameterBuilder.build()); exchange.endExchange(); return; } } } catch (Throwable t) { ROOT_LOGGER.modelRequestError(t); Common.sendError(exchange, encode, t.getLocalizedMessage()); return; } callback.sendResponse(response); } private GetOperation getOperation(HttpServerExchange exchange) { Map<String, Deque<String>> queryParameters = exchange.getQueryParameters(); GetOperation operation = null; Deque<String> parameter = queryParameters.get(OP); if (parameter != null) { String value = parameter.getFirst(); try { operation = 
GetOperation.valueOf(value.toUpperCase(Locale.ENGLISH).replace('-', '_')); value = operation.realOperation(); } catch (Exception e) { throw HttpServerLogger.ROOT_LOGGER.invalidOperation(e, value); } } // This will now only occur if no operation at all was specified on the incoming request. if (operation == null) { operation = GetOperation.RESOURCE; } return operation; } private ModelNode convertGetRequest(HttpServerExchange exchange, GetOperation operation) { ArrayList<String> pathSegments = decodePath(exchange.getRequestPath()); Map<String, Deque<String>> queryParameters = exchange.getQueryParameters(); ModelNode dmr = new ModelNode(); for (Entry<String, Deque<String>> entry : queryParameters.entrySet()) { String key = entry.getKey(); String value = entry.getValue().getFirst(); ModelNode valueNode; if (key.startsWith("operation-header-")) { String header = key.substring("operation-header-".length()); valueNode = dmr.get(OPERATION_HEADERS, header); } else { valueNode = dmr.get(key); } valueNode.set(!value.equals("") ? value : "true"); } dmr.get(OP).set(operation.realOperation); ModelNode list = dmr.get(OP_ADDR).setEmptyList(); for (int i = 1; i < pathSegments.size() - 1; i += 2) { list.add(pathSegments.get(i), pathSegments.get(i + 1)); } return dmr; } private ModelNode convertPostRequest(HttpServerExchange exchange, boolean encode) throws IOException { InputStream in = new ChannelInputStream(exchange.getRequestChannel()); try { return encode ? ModelNode.fromBase64(in) : ModelNode.fromJSONStream(in); } finally { IoUtils.safeClose(in); } } private ArrayList<String> decodePath(String path) { if (path == null) throw new IllegalArgumentException(); int i = path.charAt(0) == '/' ? 
1 : 0; ArrayList<String> segments = new ArrayList<String>(); do { int j = path.indexOf('/', i); if (j == -1) j = path.length(); segments.add(unescape(path.substring(i, j))); i = j + 1; } while (i < path.length()); return segments; } private String unescape(String string) { try { // URLDecoder could be way more efficient, replace it one day return URLDecoder.decode(string, Common.UTF_8); } catch (UnsupportedEncodingException e) { throw new IllegalStateException(e); } } /** * Determine whether the prepared response should be sent, before the operation completed. This is needed in order * that operations like :reload() can be executed without causing communication failures. * * @param operation the operation to be executed * @return {@code true} if the prepared result should be sent, {@code false} otherwise */ private boolean sendPreparedResponse(final ModelNode operation) { final PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR)); final String op = operation.get(OP).asString(); final int size = address.size(); if (size == 0) { if (op.equals("reload")) { return true; } else if (op.equals(COMPOSITE)) { // TODO return false; } else { return false; } } else if (size == 1) { if (address.getLastElement().getKey().equals(HOST)) { return op.equals("reload"); } } return false; } /** * Callback to prevent the response will be sent multiple times. */ private abstract static class ResponseCallback { private volatile boolean complete; void sendResponse(final ModelNode response) { if (complete) { return; } complete = true; doSendResponse(response); } abstract void doSendResponse(ModelNode response); } }
domain-http/interface/src/main/java/org/jboss/as/domain/http/server/DomainApiHandler.java
/* * JBoss, Home of Professional Open Source. * Copyright 2012, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
*/ package org.jboss.as.domain.http.server; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ACCESS_MECHANISM; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.COMPOSITE; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.FAILED; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.HOST; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OPERATION_HEADERS; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OUTCOME; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_OPERATION_DESCRIPTION_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_OPERATION_NAMES_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_RESOURCE_DESCRIPTION_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.READ_RESOURCE_OPERATION; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.RESULT; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.SUCCESS; import static org.jboss.as.domain.http.server.DomainUtil.writeResponse; import static org.jboss.as.domain.http.server.logging.HttpServerLogger.ROOT_LOGGER; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.security.MessageDigest; import java.util.ArrayList; import java.util.Deque; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import io.undertow.server.HttpHandler; import io.undertow.server.HttpServerExchange; import io.undertow.util.ETag; import io.undertow.util.ETagUtils; import io.undertow.util.HeaderMap; import 
io.undertow.util.Headers; import io.undertow.util.HexConverter; import io.undertow.util.Methods; import org.jboss.as.controller.ModelController; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.client.OperationBuilder; import org.jboss.as.controller.client.OperationMessageHandler; import org.jboss.as.core.security.AccessMechanism; import org.jboss.as.domain.http.server.logging.HttpServerLogger; import org.jboss.dmr.ModelNode; import org.xnio.IoUtils; import org.xnio.streams.ChannelInputStream; /** * * @author <a href="[email protected]">Kabir Khan</a> */ class DomainApiHandler implements HttpHandler { /** * Represents all possible management operations that can be executed using HTTP GET. Cacheable operations * have a {@code maxAge} property &gt; 0. */ enum GetOperation { /* * It is essential that the GET requests exposed over the HTTP interface are for read only * operations that do not modify the domain model or update anything server side. */ RESOURCE(READ_RESOURCE_OPERATION, 0), ATTRIBUTE("read-attribute", 0), RESOURCE_DESCRIPTION(READ_RESOURCE_DESCRIPTION_OPERATION, Common.ONE_WEEK), SNAPSHOTS("list-snapshots", 0), OPERATION_DESCRIPTION(READ_OPERATION_DESCRIPTION_OPERATION, Common.ONE_WEEK), OPERATION_NAMES(READ_OPERATION_NAMES_OPERATION, 0); private String realOperation; private int maxAge; GetOperation(String realOperation, int maxAge) { this.realOperation = realOperation; this.maxAge = maxAge; } public String realOperation() { return realOperation; } public int getMaxAge() { return maxAge; } } private final ModelController modelController; DomainApiHandler(ModelController modelController) { this.modelController = modelController; } @Override public void handleRequest(final HttpServerExchange exchange) { final ModelNode dmr; ModelNode response; HeaderMap requestHeaders = exchange.getRequestHeaders(); final boolean cachable; final boolean get = exchange.getRequestMethod().equals(Methods.GET); final boolean encode = 
Common.APPLICATION_DMR_ENCODED.equals(requestHeaders.getFirst(Headers.ACCEPT)) || Common.APPLICATION_DMR_ENCODED.equals(requestHeaders.getFirst(Headers.CONTENT_TYPE)); final OperationParameter.Builder operationParameterBuilder = new OperationParameter.Builder(get).encode(encode); try { if (get) { GetOperation operation = getOperation(exchange); operationParameterBuilder.maxAge(operation.getMaxAge()); dmr = convertGetRequest(exchange, operation); cachable = operation.getMaxAge() > 0; } else { dmr = convertPostRequest(exchange, encode); cachable = false; } operationParameterBuilder.pretty(dmr.hasDefined("json.pretty") && dmr.get("json.pretty").asBoolean()); } catch (Exception e) { ROOT_LOGGER.debugf("Unable to construct ModelNode '%s'", e.getMessage()); Common.sendError(exchange, false, e.getLocalizedMessage()); return; } final ResponseCallback callback = new ResponseCallback() { @Override void doSendResponse(final ModelNode response) { if (response.hasDefined(OUTCOME) && FAILED.equals(response.get(OUTCOME).asString())) { Common.sendError(exchange, encode, response); return; } writeResponse(exchange, 200, response, operationParameterBuilder.build()); } }; final boolean sendPreparedResponse = sendPreparedResponse(dmr); final ModelController.OperationTransactionControl control = sendPreparedResponse ? 
new ModelController.OperationTransactionControl() { @Override public void operationPrepared(final ModelController.OperationTransaction transaction, final ModelNode result) { transaction.commit(); // Fix prepared result result.get(OUTCOME).set(SUCCESS); result.get(RESULT); callback.sendResponse(result); } } : ModelController.OperationTransactionControl.COMMIT; try { dmr.get(OPERATION_HEADERS, ACCESS_MECHANISM).set(AccessMechanism.HTTP.toString()); response = modelController.execute(dmr, OperationMessageHandler.logging, control, new OperationBuilder(dmr).build()); if (cachable) { // Use the MD5 of the model nodes toString() method as ETag MessageDigest md = MessageDigest.getInstance("MD5"); md.update(response.toString().getBytes()); ETag etag = new ETag(false, HexConverter.convertToHexString(md.digest())); operationParameterBuilder.etag(etag); if (!ETagUtils.handleIfNoneMatch(exchange, etag, false)) { exchange.setResponseCode(304); DomainUtil.writeCacheHeaders(exchange, 304, operationParameterBuilder.build()); exchange.endExchange(); return; } } } catch (Throwable t) { ROOT_LOGGER.modelRequestError(t); Common.sendError(exchange, encode, t.getLocalizedMessage()); return; } callback.sendResponse(response); } private GetOperation getOperation(HttpServerExchange exchange) { Map<String, Deque<String>> queryParameters = exchange.getQueryParameters(); GetOperation operation = null; Deque<String> parameter = queryParameters.get(OP); if (parameter != null) { String value = parameter.getFirst(); try { operation = GetOperation.valueOf(value.toUpperCase(Locale.ENGLISH).replace('-', '_')); value = operation.realOperation(); } catch (Exception e) { throw HttpServerLogger.ROOT_LOGGER.invalidOperation(e, value); } } // This will now only occur if no operation at all was specified on the incoming request. 
if (operation == null) { operation = GetOperation.RESOURCE; } return operation; } private ModelNode convertGetRequest(HttpServerExchange exchange, GetOperation operation) { ArrayList<String> pathSegments = decodePath(exchange.getRequestPath()); Map<String, Deque<String>> queryParameters = exchange.getQueryParameters(); ModelNode dmr = new ModelNode(); for (Entry<String, Deque<String>> entry : queryParameters.entrySet()) { String key = entry.getKey(); String value = entry.getValue().getFirst(); ModelNode valueNode; if (key.startsWith("operation-header-")) { String header = key.substring("operation-header-".length()); valueNode = dmr.get(OPERATION_HEADERS, header); } else { valueNode = dmr.get(key); } valueNode.set(!value.equals("") ? value : "true"); } dmr.get(OP).set(operation.realOperation); ModelNode list = dmr.get(OP_ADDR).setEmptyList(); for (int i = 1; i < pathSegments.size() - 1; i += 2) { list.add(pathSegments.get(i), pathSegments.get(i + 1)); } return dmr; } private ModelNode convertPostRequest(HttpServerExchange exchange, boolean encode) throws IOException { InputStream in = new ChannelInputStream(exchange.getRequestChannel()); try { return encode ? ModelNode.fromBase64(in) : ModelNode.fromJSONStream(in); } finally { IoUtils.safeClose(in); } } private ArrayList<String> decodePath(String path) { if (path == null) throw new IllegalArgumentException(); int i = path.charAt(0) == '/' ? 1 : 0; ArrayList<String> segments = new ArrayList<String>(); do { int j = path.indexOf('/', i); if (j == -1) j = path.length(); segments.add(unescape(path.substring(i, j))); i = j + 1; } while (i < path.length()); return segments; } private String unescape(String string) { try { // URLDecoder could be way more efficient, replace it one day return URLDecoder.decode(string, Common.UTF_8); } catch (UnsupportedEncodingException e) { throw new IllegalStateException(e); } } /** * Determine whether the prepared response should be sent, before the operation completed. 
This is needed in order * that operations like :reload() can be executed without causing communication failures. * * @param operation the operation to be executed * @return {@code true} if the prepared result should be sent, {@code false} otherwise */ private boolean sendPreparedResponse(final ModelNode operation) { final PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR)); final String op = operation.get(OP).asString(); final int size = address.size(); if (size == 0) { if (op.equals("reload")) { return true; } else if (op.equals(COMPOSITE)) { // TODO return false; } else { return false; } } else if (size == 1) { if (address.getLastElement().getKey().equals(HOST)) { return op.equals("reload"); } } return false; } /** * Callback to prevent the response will be sent multiple times. */ private abstract static class ResponseCallback { private volatile boolean complete; void sendResponse(final ModelNode response) { if (complete) { return; } complete = true; doSendResponse(response); } abstract void doSendResponse(ModelNode response); } }
[WFLY-3371] Add support for json.pretty=1 was: ca5c3392e98d3aa89cc285272dde0c68d3b8e5f3
domain-http/interface/src/main/java/org/jboss/as/domain/http/server/DomainApiHandler.java
[WFLY-3371] Add support for json.pretty=1
Java
lgpl-2.1
f4ea1810648a3e8de69debd5f5f09552258e4e60
0
beast-dev/beast-mcmc,4ment/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,adamallo/beast-mcmc,maxbiostat/beast-mcmc,4ment/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,adamallo/beast-mcmc,maxbiostat/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,adamallo/beast-mcmc,maxbiostat/beast-mcmc
/* * MarkovChain.java * * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.inference.markovchain; import dr.evomodel.continuous.GibbsIndependentCoalescentOperator; import dr.inference.model.CompoundLikelihood; import dr.inference.model.Likelihood; import dr.inference.model.Model; import dr.inference.model.PathLikelihood; import dr.inference.operators.*; import java.io.Serializable; import java.util.ArrayList; import java.util.logging.Logger; /** * A concrete markov chain. This is final as the only things that should need * overriding are in the delegates (prior, likelihood, schedule and acceptor). * The design of this class is to be fairly immutable as far as settings goes. 
* * @author Alexei Drummond * @author Andrew Rambaut * @version $Id: MarkovChain.java,v 1.10 2006/06/21 13:34:42 rambaut Exp $ */ public final class MarkovChain implements Serializable { private static final long serialVersionUID = 181L; private final static boolean DEBUG = false; private final static boolean PROFILE = true; public static final double EVALUATION_TEST_THRESHOLD = 1e-1; private final OperatorSchedule schedule; private final Acceptor acceptor; private final Likelihood likelihood; private boolean pleaseStop = false; private boolean isStopped = false; private double bestScore, currentScore, initialScore; private long currentLength; private boolean useCoercion = true; private final long fullEvaluationCount; private final int minOperatorCountForFullEvaluation; private double evaluationTestThreshold = EVALUATION_TEST_THRESHOLD; public MarkovChain(Likelihood likelihood, OperatorSchedule schedule, Acceptor acceptor, long fullEvaluationCount, int minOperatorCountForFullEvaluation, double evaluationTestThreshold, boolean useCoercion) { currentLength = 0; this.likelihood = likelihood; this.schedule = schedule; this.acceptor = acceptor; this.useCoercion = useCoercion; this.fullEvaluationCount = fullEvaluationCount; this.minOperatorCountForFullEvaluation = minOperatorCountForFullEvaluation; this.evaluationTestThreshold = evaluationTestThreshold; Likelihood.CONNECTED_LIKELIHOOD_SET.add(likelihood); Likelihood.CONNECTED_LIKELIHOOD_SET.addAll(likelihood.getLikelihoodSet()); for (Likelihood l : Likelihood.FULL_LIKELIHOOD_SET) { if (!Likelihood.CONNECTED_LIKELIHOOD_SET.contains(l)) { System.err.println("WARNING: Likelihood component, " + l.getId() + ", created but not used in the MCMC"); } } currentScore = evaluate(likelihood); } /** * Resets the markov chain */ public void reset() { currentLength = 0; // reset operator acceptance levels for (int i = 0; i < schedule.getOperatorCount(); i++) { schedule.getOperator(i).reset(); } } /** * Run the chain for a given number 
of states. * * @param length number of states to run the chain. */ public long runChain(long length, boolean disableCoerce) { likelihood.makeDirty(); currentScore = evaluate(likelihood); long currentState = currentLength; final Model currentModel = likelihood.getModel(); if (currentState == 0) { initialScore = currentScore; bestScore = currentScore; fireBestModel(currentState, currentModel); } if (currentScore == Double.NEGATIVE_INFINITY) { // identify which component of the score is zero... String message = "The initial likelihood is zero"; if (likelihood instanceof CompoundLikelihood) { message += ": " + ((CompoundLikelihood) likelihood).getDiagnosis(); } else if (likelihood instanceof PathLikelihood) { message += ": " + ((CompoundLikelihood)((PathLikelihood) likelihood).getSourceLikelihood()).getDiagnosis(); message += ": " + ((CompoundLikelihood)((PathLikelihood) likelihood).getDestinationLikelihood()).getDiagnosis(); } else { message += "."; } throw new IllegalArgumentException(message); } else if (currentScore == Double.POSITIVE_INFINITY || Double.isNaN(currentScore)) { String message = "A likelihood returned with a numerical error"; if (likelihood instanceof CompoundLikelihood) { message += ": " + ((CompoundLikelihood) likelihood).getDiagnosis(); } else { message += "."; } throw new IllegalArgumentException(message); } pleaseStop = false; isStopped = false; //int otfcounter = onTheFlyOperatorWeights > 0 ? 
onTheFlyOperatorWeights : 0; double[] logr = {0.0}; boolean usingFullEvaluation = true; // set ops count in mcmc element instead if (fullEvaluationCount == 0) // Temporary solution until full code review usingFullEvaluation = false; boolean fullEvaluationError = false; while (!pleaseStop && (currentState < (currentLength + length))) { String diagnosticStart = ""; // periodically log states fireCurrentModel(currentState, currentModel); if (pleaseStop) { isStopped = true; break; } // Get the operator final int op = schedule.getNextOperatorIndex(); final MCMCOperator mcmcOperator = schedule.getOperator(op); double oldScore = currentScore; if (usingFullEvaluation) { diagnosticStart = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; } // assert Profiler.startProfile("Store"); // The current model is stored here in case the proposal fails if (currentModel != null) { currentModel.storeModelState(); } // assert Profiler.stopProfile("Store"); boolean operatorSucceeded = true; double hastingsRatio = 1.0; boolean accept = false; logr[0] = -Double.MAX_VALUE; long elaspedTime = 0; if (PROFILE) { elaspedTime = System.currentTimeMillis(); } // The new model is proposed // assert Profiler.startProfile("Operate"); if (DEBUG) { System.out.println("\n>> Iteration: " + currentState); System.out.println("\n&& Operator: " + mcmcOperator.getOperatorName()); } if (mcmcOperator instanceof GeneralOperator) { hastingsRatio = ((GeneralOperator) mcmcOperator).operate(likelihood); } else { hastingsRatio = mcmcOperator.operate(); } // assert Profiler.stopProfile("Operate"); if (hastingsRatio == Double.NEGATIVE_INFINITY) { // Should the evaluation be short-cutted? // Previously this was set to false if OperatorFailedException was thrown. // Now a -Inf HR is returned. 
operatorSucceeded = false; } if (PROFILE) { long duration = System.currentTimeMillis() - elaspedTime; if (DEBUG) { System.out.println("Time: " + duration); } mcmcOperator.addEvaluationTime(duration); } double score = Double.NaN; double deviation = Double.NaN; // System.err.print("" + currentState + ": "); if (operatorSucceeded) { // The new model is proposed // assert Profiler.startProfile("Evaluate"); if (DEBUG) { System.out.println("** Evaluate"); } long elapsedTime = 0; if (PROFILE) { elapsedTime = System.currentTimeMillis(); } // The new model is evaluated score = evaluate(likelihood); if (PROFILE) { long duration = System.currentTimeMillis() - elapsedTime; if (DEBUG) { System.out.println("Time: " + duration); } mcmcOperator.addEvaluationTime(duration); } String diagnosticOperator = ""; if (usingFullEvaluation) { diagnosticOperator = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; } if (score == Double.NEGATIVE_INFINITY && mcmcOperator instanceof GibbsOperator) { if (!(mcmcOperator instanceof GibbsIndependentNormalDistributionOperator) && !(mcmcOperator instanceof GibbsIndependentGammaOperator) && !(mcmcOperator instanceof GibbsIndependentCoalescentOperator) && !(mcmcOperator instanceof GibbsIndependentJointNormalGammaOperator)) { Logger.getLogger("error").severe("State " + currentState + ": A Gibbs operator, " + mcmcOperator.getOperatorName() + ", returned a state with zero likelihood."); } } if (score == Double.POSITIVE_INFINITY || Double.isNaN(score) ) { if (likelihood instanceof CompoundLikelihood) { Logger.getLogger("error").severe("State "+currentState+": A likelihood returned with a numerical error:\n" + ((CompoundLikelihood)likelihood).getDiagnosis()); } else { Logger.getLogger("error").severe("State "+currentState+": A likelihood returned with a numerical error."); } // If the user has chosen to ignore this error then we transform it // to a negative infinity so the state is rejected. 
score = Double.NEGATIVE_INFINITY; } if (usingFullEvaluation) { // This is a test that the state was correctly evaluated. The // likelihood of all components of the model are flagged as // needing recalculation, then the full likelihood is calculated // again and compared to the first result. This checks that the // BEAST is aware of all changes that the operator induced. likelihood.makeDirty(); final double testScore = evaluate(likelihood); final String d2 = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; if (Math.abs(testScore - score) > evaluationTestThreshold) { Logger.getLogger("error").severe( "State "+currentState+": State was not correctly calculated after an operator move.\n" + "Likelihood evaluation: " + score + "\nFull Likelihood evaluation: " + testScore + "\n" + "Operator: " + mcmcOperator + " " + mcmcOperator.getOperatorName() + (diagnosticOperator.length() > 0 ? "\n\nDetails\nBefore: " + diagnosticOperator + "\nAfter: " + d2 : "") + "\n\n"); fullEvaluationError = true; } } if (score > bestScore) { bestScore = score; fireBestModel(currentState, currentModel); } accept = mcmcOperator instanceof GibbsOperator || acceptor.accept(oldScore, score, hastingsRatio, logr); deviation = score - oldScore; } // The new model is accepted or rejected if (accept) { if (DEBUG) { System.out.println("** Move accepted: new score = " + score + ", old score = " + oldScore); } mcmcOperator.accept(deviation); currentModel.acceptModelState(); currentScore = score; } else { if (DEBUG) { System.out.println("** Move rejected: new score = " + score + ", old score = " + oldScore + " (logr = " + logr[0] + ")"); } mcmcOperator.reject(); // assert Profiler.startProfile("Restore"); currentModel.restoreModelState(); if (usingFullEvaluation) { // This is a test that the state is correctly restored. The // restored state is fully evaluated and the likelihood compared with // that before the operation was made. 
likelihood.makeDirty(); final double testScore = evaluate(likelihood); final String d2 = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; if (Math.abs(testScore - oldScore) > evaluationTestThreshold) { final Logger logger = Logger.getLogger("error"); logger.severe("State "+currentState+": State was not correctly restored after reject step.\n" + "Likelihood before: " + oldScore + " Likelihood after: " + testScore + "\n" + "Operator: " + mcmcOperator + " " + mcmcOperator.getOperatorName() + (diagnosticStart.length() > 0 ? "\n\nDetails\nBefore: " + diagnosticStart + "\nAfter: " + d2 : "") + "\n\n"); fullEvaluationError = true; } } } // assert Profiler.stopProfile("Restore"); if (!disableCoerce && mcmcOperator instanceof CoercableMCMCOperator) { coerceAcceptanceProbability((CoercableMCMCOperator) mcmcOperator, logr[0]); } if (usingFullEvaluation) { if (schedule.getMinimumAcceptAndRejectCount() >= minOperatorCountForFullEvaluation && currentState >= fullEvaluationCount) { // full evaluation is only switched off when each operator has done a // minimum number of operations (currently 1) and fullEvalationCount // operations in total. usingFullEvaluation = false; if (fullEvaluationError) { // If there has been an error then stop with an error throw new RuntimeException( "One or more evaluation errors occurred during the test phase of this\n" + "run. 
These errors imply critical errors which may produce incorrect\n" + "results."); } } } fireEndCurrentIteration(currentState); currentState += 1; } currentLength = currentState; return currentLength; } public void terminateChain() { fireFinished(currentLength); // Profiler.report(); } public Likelihood getLikelihood() { return likelihood; } public Model getModel() { return likelihood.getModel(); } public OperatorSchedule getSchedule() { return schedule; } public Acceptor getAcceptor() { return acceptor; } public double getInitialScore() { return initialScore; } public double getBestScore() { return bestScore; } public long getCurrentLength() { return currentLength; } public void setCurrentLength(long currentLength) { this.currentLength = currentLength; } public double getCurrentScore() { return currentScore; } public void pleaseStop() { pleaseStop = true; } public boolean isStopped() { return isStopped; } public double evaluate() { return evaluate(likelihood); } protected double evaluate(Likelihood likelihood) { double logPosterior = 0.0; final double logLikelihood = likelihood.getLogLikelihood(); if (Double.isNaN(logLikelihood)) { return Double.NEGATIVE_INFINITY; } // System.err.println("** " + logPosterior + " + " + logLikelihood + // " = " + (logPosterior + logLikelihood)); logPosterior += logLikelihood; return logPosterior; } /** * Updates the proposal parameter, based on the target acceptance * probability This method relies on the proposal parameter being a * decreasing function of acceptance probability. 
* * @param op The operator * @param logr */ private void coerceAcceptanceProbability(CoercableMCMCOperator op, double logr) { if (DEBUG) { System.out.println("coerceAcceptanceProbability " + isCoercable(op)); } if (isCoercable(op)) { final double p = op.getCoercableParameter(); final double i = schedule.getOptimizationTransform(MCMCOperator.Utils.getOperationCount(op)); final double target = op.getTargetAcceptanceProbability(); final double newp = p + ((1.0 / (i + 1.0)) * (Math.exp(logr) - target)); if (newp > -Double.MAX_VALUE && newp < Double.MAX_VALUE) { op.setCoercableParameter(newp); if (DEBUG) { System.out.println("Setting coercable parameter: " + newp + " target: " + target + " logr: " + logr); } } } } private boolean isCoercable(CoercableMCMCOperator op) { return op.getMode() == CoercionMode.COERCION_ON || (op.getMode() != CoercionMode.COERCION_OFF && useCoercion); } public void addMarkovChainListener(MarkovChainListener listener) { if (listener != null) { listeners.add(listener); } } public void removeMarkovChainListener(MarkovChainListener listener) { listeners.remove(listener); } private void fireBestModel(long state, Model bestModel) { for (MarkovChainListener listener : listeners) { listener.bestState(state, this, bestModel); } } private void fireCurrentModel(long state, Model currentModel) { for (MarkovChainListener listener : listeners) { listener.currentState(state, this, currentModel); } } private void fireFinished(long chainLength) { for (MarkovChainListener listener : listeners) { listener.finished(chainLength, this); } } private void fireEndCurrentIteration(long state) { } private final ArrayList<MarkovChainListener> listeners = new ArrayList<MarkovChainListener>(); }
src/dr/inference/markovchain/MarkovChain.java
/* * MarkovChain.java * * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.inference.markovchain; import dr.evomodel.continuous.GibbsIndependentCoalescentOperator; import dr.inference.model.CompoundLikelihood; import dr.inference.model.Likelihood; import dr.inference.model.Model; import dr.inference.model.PathLikelihood; import dr.inference.operators.*; import java.io.Serializable; import java.util.ArrayList; import java.util.logging.Logger; /** * A concrete markov chain. This is final as the only things that should need * overriding are in the delegates (prior, likelihood, schedule and acceptor). * The design of this class is to be fairly immutable as far as settings goes. 
* * @author Alexei Drummond * @author Andrew Rambaut * @version $Id: MarkovChain.java,v 1.10 2006/06/21 13:34:42 rambaut Exp $ */ public final class MarkovChain implements Serializable { private static final long serialVersionUID = 181L; private final static boolean DEBUG = false; private final static boolean PROFILE = true; public static final double EVALUATION_TEST_THRESHOLD = 1e-1; private final OperatorSchedule schedule; private final Acceptor acceptor; private final Likelihood likelihood; private boolean pleaseStop = false; private boolean isStopped = false; private double bestScore, currentScore, initialScore; private long currentLength; private boolean useCoercion = true; private final long fullEvaluationCount; private final int minOperatorCountForFullEvaluation; private double evaluationTestThreshold = EVALUATION_TEST_THRESHOLD; public MarkovChain(Likelihood likelihood, OperatorSchedule schedule, Acceptor acceptor, long fullEvaluationCount, int minOperatorCountForFullEvaluation, double evaluationTestThreshold, boolean useCoercion) { currentLength = 0; this.likelihood = likelihood; this.schedule = schedule; this.acceptor = acceptor; this.useCoercion = useCoercion; this.fullEvaluationCount = fullEvaluationCount; this.minOperatorCountForFullEvaluation = minOperatorCountForFullEvaluation; this.evaluationTestThreshold = evaluationTestThreshold; Likelihood.CONNECTED_LIKELIHOOD_SET.add(likelihood); Likelihood.CONNECTED_LIKELIHOOD_SET.addAll(likelihood.getLikelihoodSet()); for (Likelihood l : Likelihood.FULL_LIKELIHOOD_SET) { if (!Likelihood.CONNECTED_LIKELIHOOD_SET.contains(l)) { System.err.println("WARNING: Likelihood component, " + l.getId() + ", created but not used in the MCMC"); } } currentScore = evaluate(likelihood); } /** * Resets the markov chain */ public void reset() { currentLength = 0; // reset operator acceptance levels for (int i = 0; i < schedule.getOperatorCount(); i++) { schedule.getOperator(i).reset(); } } /** * Run the chain for a given number 
of states. * * @param length number of states to run the chain. */ public long runChain(long length, boolean disableCoerce) { likelihood.makeDirty(); currentScore = evaluate(likelihood); long currentState = currentLength; final Model currentModel = likelihood.getModel(); if (currentState == 0) { initialScore = currentScore; bestScore = currentScore; fireBestModel(currentState, currentModel); } if (currentScore == Double.NEGATIVE_INFINITY) { // identify which component of the score is zero... String message = "The initial likelihood is zero"; if (likelihood instanceof CompoundLikelihood) { message += ": " + ((CompoundLikelihood) likelihood).getDiagnosis(); } else if (likelihood instanceof PathLikelihood) { message += ": " + ((CompoundLikelihood)((PathLikelihood) likelihood).getSourceLikelihood()).getDiagnosis(); message += ": " + ((CompoundLikelihood)((PathLikelihood) likelihood).getDestinationLikelihood()).getDiagnosis(); } else { message += "."; } throw new IllegalArgumentException(message); } else if (currentScore == Double.POSITIVE_INFINITY || Double.isNaN(currentScore)) { String message = "A likelihood returned with a numerical error"; if (likelihood instanceof CompoundLikelihood) { message += ": " + ((CompoundLikelihood) likelihood).getDiagnosis(); } else { message += "."; } throw new IllegalArgumentException(message); } pleaseStop = false; isStopped = false; //int otfcounter = onTheFlyOperatorWeights > 0 ? 
onTheFlyOperatorWeights : 0; double[] logr = {0.0}; boolean usingFullEvaluation = true; // set ops count in mcmc element instead if (fullEvaluationCount == 0) // Temporary solution until full code review usingFullEvaluation = false; boolean fullEvaluationError = false; while (!pleaseStop && (currentState < (currentLength + length))) { String diagnosticStart = ""; // periodically log states fireCurrentModel(currentState, currentModel); if (pleaseStop) { isStopped = true; break; } // Get the operator final int op = schedule.getNextOperatorIndex(); final MCMCOperator mcmcOperator = schedule.getOperator(op); double oldScore = currentScore; if (usingFullEvaluation) { diagnosticStart = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; } // assert Profiler.startProfile("Store"); // The current model is stored here in case the proposal fails if (currentModel != null) { currentModel.storeModelState(); } // assert Profiler.stopProfile("Store"); boolean operatorSucceeded = true; double hastingsRatio = 1.0; boolean accept = false; logr[0] = -Double.MAX_VALUE; long elaspedTime = 0; if (PROFILE) { elaspedTime = System.currentTimeMillis(); } // The new model is proposed // assert Profiler.startProfile("Operate"); if (DEBUG) { System.out.println("\n>> Iteration: " + currentState); System.out.println("\n&& Operator: " + mcmcOperator.getOperatorName()); } if (mcmcOperator instanceof GeneralOperator) { hastingsRatio = ((GeneralOperator) mcmcOperator).operate(likelihood); } else { hastingsRatio = mcmcOperator.operate(); } // assert Profiler.stopProfile("Operate"); if (hastingsRatio == Double.NEGATIVE_INFINITY) { // Should the evaluation be short-cutted? // Previously this was set to false if OperatorFailedException was thrown. // Now a -Inf HR is returned. 
operatorSucceeded = false; } if (PROFILE) { long duration = System.currentTimeMillis() - elaspedTime; if (DEBUG) { System.out.println("Time: " + duration); } mcmcOperator.addEvaluationTime(duration); } double score = Double.NaN; double deviation = Double.NaN; // System.err.print("" + currentState + ": "); if (operatorSucceeded) { // The new model is proposed // assert Profiler.startProfile("Evaluate"); if (DEBUG) { System.out.println("** Evaluate"); } long elapsedTime = 0; if (PROFILE) { elapsedTime = System.currentTimeMillis(); } // The new model is evaluated score = evaluate(likelihood); if (PROFILE) { long duration = System.currentTimeMillis() - elapsedTime; if (DEBUG) { System.out.println("Time: " + duration); } mcmcOperator.addEvaluationTime(duration); } String diagnosticOperator = ""; if (usingFullEvaluation) { diagnosticOperator = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; } if (score == Double.NEGATIVE_INFINITY && mcmcOperator instanceof GibbsOperator) { if (!(mcmcOperator instanceof GibbsIndependentNormalDistributionOperator) && !(mcmcOperator instanceof GibbsIndependentGammaOperator) && !(mcmcOperator instanceof GibbsIndependentCoalescentOperator) && !(mcmcOperator instanceof GibbsIndependentJointNormalGammaOperator)) { Logger.getLogger("error").severe("State " + currentState + ": A Gibbs operator, " + mcmcOperator.getOperatorName() + ", returned a state with zero likelihood."); } } if (score == Double.POSITIVE_INFINITY || Double.isNaN(score) ) { if (likelihood instanceof CompoundLikelihood) { Logger.getLogger("error").severe("State "+currentState+": A likelihood returned with a numerical error:\n" + ((CompoundLikelihood)likelihood).getDiagnosis()); } else { Logger.getLogger("error").severe("State "+currentState+": A likelihood returned with a numerical error."); } // If the user has chosen to ignore this error then we transform it // to a negative infinity so the state is rejected. 
score = Double.NEGATIVE_INFINITY; } if (usingFullEvaluation) { // This is a test that the state was correctly evaluated. The // likelihood of all components of the model are flagged as // needing recalculation, then the full likelihood is calculated // again and compared to the first result. This checks that the // BEAST is aware of all changes that the operator induced. likelihood.makeDirty(); final double testScore = evaluate(likelihood); final String d2 = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; if (Math.abs(testScore - score) > evaluationTestThreshold) { Logger.getLogger("error").severe( "State "+currentState+": State was not correctly calculated after an operator move.\n" + "Likelihood evaluation: " + score + "\nFull Likelihood evaluation: " + testScore + "\n" + "Operator: " + mcmcOperator + " " + mcmcOperator.getOperatorName() + (diagnosticOperator.length() > 0 ? "\n\nDetails\nBefore: " + diagnosticOperator + "\nAfter: " + d2 : "") + "\n\n"); fullEvaluationError = true; } } if (score > bestScore) { bestScore = score; fireBestModel(currentState, currentModel); } accept = mcmcOperator instanceof GibbsOperator || acceptor.accept(oldScore, score, hastingsRatio, logr); deviation = score - oldScore; } // The new model is accepted or rejected if (accept) { if (DEBUG) { System.out.println("** Move accepted: new score = " + score + ", old score = " + oldScore); } mcmcOperator.accept(deviation); currentModel.acceptModelState(); currentScore = score; } else { if (DEBUG) { System.out.println("** Move rejected: new score = " + score + ", old score = " + oldScore); } mcmcOperator.reject(); // assert Profiler.startProfile("Restore"); currentModel.restoreModelState(); if (usingFullEvaluation) { // This is a test that the state is correctly restored. The // restored state is fully evaluated and the likelihood compared with // that before the operation was made. 
likelihood.makeDirty(); final double testScore = evaluate(likelihood); final String d2 = likelihood instanceof CompoundLikelihood ? ((CompoundLikelihood) likelihood).getDiagnosis() : ""; if (Math.abs(testScore - oldScore) > evaluationTestThreshold) { final Logger logger = Logger.getLogger("error"); logger.severe("State "+currentState+": State was not correctly restored after reject step.\n" + "Likelihood before: " + oldScore + " Likelihood after: " + testScore + "\n" + "Operator: " + mcmcOperator + " " + mcmcOperator.getOperatorName() + (diagnosticStart.length() > 0 ? "\n\nDetails\nBefore: " + diagnosticStart + "\nAfter: " + d2 : "") + "\n\n"); fullEvaluationError = true; } } } // assert Profiler.stopProfile("Restore"); if (!disableCoerce && mcmcOperator instanceof CoercableMCMCOperator) { coerceAcceptanceProbability((CoercableMCMCOperator) mcmcOperator, logr[0]); } if (usingFullEvaluation) { if (schedule.getMinimumAcceptAndRejectCount() >= minOperatorCountForFullEvaluation && currentState >= fullEvaluationCount) { // full evaluation is only switched off when each operator has done a // minimum number of operations (currently 1) and fullEvalationCount // operations in total. usingFullEvaluation = false; if (fullEvaluationError) { // If there has been an error then stop with an error throw new RuntimeException( "One or more evaluation errors occurred during the test phase of this\n" + "run. 
These errors imply critical errors which may produce incorrect\n" + "results."); } } } fireEndCurrentIteration(currentState); currentState += 1; } currentLength = currentState; return currentLength; } public void terminateChain() { fireFinished(currentLength); // Profiler.report(); } public Likelihood getLikelihood() { return likelihood; } public Model getModel() { return likelihood.getModel(); } public OperatorSchedule getSchedule() { return schedule; } public Acceptor getAcceptor() { return acceptor; } public double getInitialScore() { return initialScore; } public double getBestScore() { return bestScore; } public long getCurrentLength() { return currentLength; } public void setCurrentLength(long currentLength) { this.currentLength = currentLength; } public double getCurrentScore() { return currentScore; } public void pleaseStop() { pleaseStop = true; } public boolean isStopped() { return isStopped; } public double evaluate() { return evaluate(likelihood); } protected double evaluate(Likelihood likelihood) { double logPosterior = 0.0; final double logLikelihood = likelihood.getLogLikelihood(); if (Double.isNaN(logLikelihood)) { return Double.NEGATIVE_INFINITY; } // System.err.println("** " + logPosterior + " + " + logLikelihood + // " = " + (logPosterior + logLikelihood)); logPosterior += logLikelihood; return logPosterior; } /** * Updates the proposal parameter, based on the target acceptance * probability This method relies on the proposal parameter being a * decreasing function of acceptance probability. 
* * @param op The operator * @param logr */ private void coerceAcceptanceProbability(CoercableMCMCOperator op, double logr) { if (DEBUG) { System.out.println("coerceAcceptanceProbability " + isCoercable(op)); } if (isCoercable(op)) { final double p = op.getCoercableParameter(); final double i = schedule.getOptimizationTransform(MCMCOperator.Utils.getOperationCount(op)); final double target = op.getTargetAcceptanceProbability(); final double newp = p + ((1.0 / (i + 1.0)) * (Math.exp(logr) - target)); if (newp > -Double.MAX_VALUE && newp < Double.MAX_VALUE) { op.setCoercableParameter(newp); if (DEBUG) { System.out.println("Setting coercable parameter: " + newp + " target: " + target + " logr: " + logr); } } } } private boolean isCoercable(CoercableMCMCOperator op) { return op.getMode() == CoercionMode.COERCION_ON || (op.getMode() != CoercionMode.COERCION_OFF && useCoercion); } public void addMarkovChainListener(MarkovChainListener listener) { if (listener != null) { listeners.add(listener); } } public void removeMarkovChainListener(MarkovChainListener listener) { listeners.remove(listener); } private void fireBestModel(long state, Model bestModel) { for (MarkovChainListener listener : listeners) { listener.bestState(state, this, bestModel); } } private void fireCurrentModel(long state, Model currentModel) { for (MarkovChainListener listener : listeners) { listener.currentState(state, this, currentModel); } } private void fireFinished(long chainLength) { for (MarkovChainListener listener : listeners) { listener.finished(chainLength, this); } } private void fireEndCurrentIteration(long state) { } private final ArrayList<MarkovChainListener> listeners = new ArrayList<MarkovChainListener>(); }
Additional output in debug mode.
src/dr/inference/markovchain/MarkovChain.java
Additional output in debug mode.
Java
lgpl-2.1
29923829f3cb69f1a2b657612c41c7183b10772a
0
sewe/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,sewe/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,sewe/spotbugs,KengoTODA/spotbugs
/* * Bytecode Analysis Framework * Copyright (C) 2003,2004 University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs.ba; import java.util.*; import java.io.*; import org.apache.bcel.*; import org.apache.bcel.classfile.*; import org.apache.bcel.generic.*; /** * A test driver for dataflow analysis classes. * It runs the dataflow analysis on the methods of a single class, * and has options (properties) to restrict the analysis to a single * method, and to print out a CFG annotated with dataflow values. 
* * @see Dataflow * @see DataflowAnalysis * @author David Hovemeyer */ public abstract class DataflowTestDriver<Fact, AnalysisType extends AbstractDataflowAnalysis<Fact>> { private static class DataflowCFGPrinter<Fact, AnalysisType extends AbstractDataflowAnalysis<Fact>> extends CFGPrinter { private Dataflow<Fact, AnalysisType> dataflow; private AnalysisType analysis; public DataflowCFGPrinter(CFG cfg, Dataflow<Fact, AnalysisType> dataflow, AnalysisType analysis) { super(cfg); this.dataflow = dataflow; this.analysis = analysis; } public String blockStartAnnotate(BasicBlock bb) { return " " + analysis.factToString(dataflow.getStartFact(bb)); } public String blockAnnotate(BasicBlock bb) { return " " + analysis.factToString(dataflow.getResultFact(bb)); } public String instructionAnnotate(InstructionHandle handle, BasicBlock bb) { try { Fact result = analysis.getFactAtLocation(new Location(handle, bb)); return " " + analysis.factToString(result); } catch (DataflowAnalysisException e) { throw new IllegalStateException("Caught exception: " + e.toString()); } } } /** * Execute the analysis on a single class. 
* @param filename the name of the class file */ public void execute(String filename) throws DataflowAnalysisException, CFGBuilderException, IOException { JavaClass jclass = new RepositoryClassParser(filename).parse(); final RepositoryLookupFailureCallback lookupFailureCallback = new RepositoryLookupFailureCallback() { public void reportMissingClass(ClassNotFoundException e) { e.printStackTrace(); System.exit(1); } }; AnalysisContext.instance().setLookupFailureCallback(lookupFailureCallback); ClassContext classContext = AnalysisContext.instance().getClassContext(jclass); String methodName = System.getProperty("dataflow.method"); Method[] methods = jclass.getMethods(); for (int i = 0; i < methods.length; ++i) { Method method = methods[i]; if (methodName != null && !method.getName().equals(methodName)) continue; MethodGen methodGen = classContext.getMethodGen(method); if (methodGen == null) continue; System.out.println("-----------------------------------------------------------------"); System.out.println("Method: " + SignatureConverter.convertMethodSignature(methodGen)); System.out.println("-----------------------------------------------------------------"); execute(classContext, method); } } /** * Execute the analysis on a single method of a class. */ public void execute(ClassContext classContext, Method method) throws DataflowAnalysisException, CFGBuilderException { Dataflow<Fact, AnalysisType> dataflow = createDataflow(classContext, method); System.out.println("Finished in " + dataflow.getNumIterations() + " iterations"); CFG cfg = classContext.getCFG(method); examineResults(cfg, dataflow); if (Boolean.getBoolean("dataflow.printcfg")) { CFGPrinter p = new DataflowCFGPrinter<Fact, AnalysisType>(cfg, dataflow, dataflow.getAnalysis()); p.print(System.out); } } /** * Downcall method to create the dataflow driver object * and execute the analysis. 
* @param classContext ClassContext for the class * @param method the Method * @return the Dataflow driver */ public abstract Dataflow<Fact, AnalysisType> createDataflow(ClassContext classContext, Method method) throws CFGBuilderException, DataflowAnalysisException; /** * Downcall method to inspect the analysis results. * Need not be implemented by subclasses. * @param cfg the control flow graph * @param dataflow the analysis results */ public void examineResults(CFG cfg, Dataflow<Fact, AnalysisType> dataflow) { } } // vim:ts=4
findbugs/src/java/edu/umd/cs/findbugs/ba/DataflowTestDriver.java
/* * Bytecode Analysis Framework * Copyright (C) 2003,2004 University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs.ba; import java.util.*; import java.io.*; import org.apache.bcel.*; import org.apache.bcel.classfile.*; import org.apache.bcel.generic.*; /** * A test driver for dataflow analysis classes. * It runs the dataflow analysis on the methods of a single class, * and has options (properties) to restrict the analysis to a single * method, and to print out a CFG annotated with dataflow values. 
* * @see Dataflow * @see DataflowAnalysis * @author David Hovemeyer */ public abstract class DataflowTestDriver<Fact, AnalysisType extends AbstractDataflowAnalysis<Fact>> { private static class DataflowCFGPrinter<Fact, AnalysisType extends AbstractDataflowAnalysis<Fact>> extends CFGPrinter { private Dataflow<Fact, AnalysisType> dataflow; private AnalysisType analysis; public DataflowCFGPrinter(CFG cfg, Dataflow<Fact, AnalysisType> dataflow, AnalysisType analysis) { super(cfg); this.dataflow = dataflow; this.analysis = analysis; } public String blockStartAnnotate(BasicBlock bb) { return " " + analysis.factToString(dataflow.getStartFact(bb)); } public String blockAnnotate(BasicBlock bb) { return " " + analysis.factToString(dataflow.getResultFact(bb)); } public String instructionAnnotate(InstructionHandle handle, BasicBlock bb) { try { Fact result = analysis.getFactAtLocation(new Location(handle, bb)); return " " + analysis.factToString(result); } catch (DataflowAnalysisException e) { throw new IllegalStateException("Caught exception: " + e.toString()); } } } /** * Execute the analysis on a single class. 
* @param filename the name of the class file */ public void execute(String filename) throws DataflowAnalysisException, CFGBuilderException, IOException { JavaClass jclass = new RepositoryClassParser(filename).parse(); final RepositoryLookupFailureCallback lookupFailureCallback = new RepositoryLookupFailureCallback() { public void reportMissingClass(ClassNotFoundException e) { e.printStackTrace(); System.exit(1); } }; ClassContext classContext = new ClassContext(jclass, lookupFailureCallback); String methodName = System.getProperty("dataflow.method"); Method[] methods = jclass.getMethods(); for (int i = 0; i < methods.length; ++i) { Method method = methods[i]; if (methodName != null && !method.getName().equals(methodName)) continue; MethodGen methodGen = classContext.getMethodGen(method); if (methodGen == null) continue; System.out.println("-----------------------------------------------------------------"); System.out.println("Method: " + SignatureConverter.convertMethodSignature(methodGen)); System.out.println("-----------------------------------------------------------------"); execute(classContext, method); } } /** * Execute the analysis on a single method of a class. */ public void execute(ClassContext classContext, Method method) throws DataflowAnalysisException, CFGBuilderException { Dataflow<Fact, AnalysisType> dataflow = createDataflow(classContext, method); System.out.println("Finished in " + dataflow.getNumIterations() + " iterations"); CFG cfg = classContext.getCFG(method); examineResults(cfg, dataflow); if (Boolean.getBoolean("dataflow.printcfg")) { CFGPrinter p = new DataflowCFGPrinter<Fact, AnalysisType>(cfg, dataflow, dataflow.getAnalysis()); p.print(System.out); } } /** * Downcall method to create the dataflow driver object * and execute the analysis. 
* @param classContext ClassContext for the class * @param method the Method * @return the Dataflow driver */ public abstract Dataflow<Fact, AnalysisType> createDataflow(ClassContext classContext, Method method) throws CFGBuilderException, DataflowAnalysisException; /** * Downcall method to inspect the analysis results. * Need not be implemented by subclasses. * @param cfg the control flow graph * @param dataflow the analysis results */ public void examineResults(CFG cfg, Dataflow<Fact, AnalysisType> dataflow) { } } // vim:ts=4
Changed to use AnalysisContext to get ClassContext. git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@1939 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
findbugs/src/java/edu/umd/cs/findbugs/ba/DataflowTestDriver.java
Changed to use AnalysisContext to get ClassContext.
Java
apache-2.0
b33b57a46783371f1773d3a7aedb0194db02e277
0
shubham166/presto,takari/presto,nakajijiji/presto,11xor6/presto,lingochamp/presto,idemura/presto,rockerbox/presto,totticarter/presto,Praveen2112/presto,miquelruiz/presto,martint/presto,haozhun/presto,ptkool/presto,ebyhr/presto,jf367/presto,mandusm/presto,prestodb/presto,nsabharwal/presto,gh351135612/presto,EvilMcJerkface/presto,youngwookim/presto,mcanthony/presto,Nasdaq/presto,facebook/presto,totticarter/presto,nezihyigitbasi/presto,erichwang/presto,ebd2/presto,haitaoyao/presto,mattyb149/presto,haozhun/presto,ebyhr/presto,Jimexist/presto,zhenyuy-fb/presto,kaschaeffer/presto,tellproject/presto,mpilman/presto,youngwookim/presto,svstanev/presto,kuzemchik/presto,zjshen/presto,fipar/presto,wrmsr/presto,11xor6/presto,tellproject/presto,aglne/presto,treasure-data/presto,zhenyuy-fb/presto,wagnermarkd/presto,deciament/presto,twitter-forks/presto,kingland/presto,zofuthan/presto,ipros-team/presto,wangcan2014/presto,XiaominZhang/presto,shixuan-fan/presto,smartpcr/presto,troels/nz-presto,cawallin/presto,nakajijiji/presto,zjshen/presto,fengshao0907/presto,miniway/presto,siddhartharay007/presto,jf367/presto,zhenyuy-fb/presto,Praveen2112/presto,wagnermarkd/presto,springning/presto,sumanth232/presto,hulu/presto,cosinequanon/presto,mattyb149/presto,cosinequanon/presto,hgschmie/presto,miquelruiz/presto,yuananf/presto,CHINA-JD/presto,soz-fb/presto,Yaliang/presto,erichwang/presto,kaschaeffer/presto,ebd2/presto,sumitkgec/presto,mugglmenzel/presto,nakajijiji/presto,jxiang/presto,jxiang/presto,kuzemchik/presto,kietly/presto,CHINA-JD/presto,Zoomdata/presto,ebyhr/presto,kined/presto,ajoabraham/presto,shixuan-fan/presto,losipiuk/presto,11xor6/presto,raghavsethi/presto,pwz3n0/presto,ocono-tech/presto,ocono-tech/presto,xiangel/presto,facebook/presto,smartpcr/presto,yu-yamada/presto,mandusm/presto,Nasdaq/presto,facebook/presto,mandusm/presto,mono-plane/presto,youngwookim/presto,nakajijiji/presto,y-lan/presto,cawallin/presto,raghavsethi/presto,mbeitchman/presto,tellproject/presto,wangcan2014/pres
to,Teradata/presto,jiangyifangh/presto,cawallin/presto,facebook/presto,ajoabraham/presto,wrmsr/presto,damiencarol/presto,youngwookim/presto,Jimexist/presto,nileema/presto,jekey/presto,fipar/presto,wangcan2014/presto,arhimondr/presto,RobinUS2/presto,aglne/presto,kuzemchik/presto,Teradata/presto,RobinUS2/presto,jiangyifangh/presto,cawallin/presto,yu-yamada/presto,bloomberg/presto,twitter-forks/presto,kaschaeffer/presto,avasilevskiy/presto,pwz3n0/presto,mattyb149/presto,denizdemir/presto,toxeh/presto,ajoabraham/presto,deciament/presto,mugglmenzel/presto,springning/presto,hgschmie/presto,propene/presto,CHINA-JD/presto,dongjoon-hyun/presto,propene/presto,jacobgao/presto,erichwang/presto,haitaoyao/presto,aleph-zero/presto,XiaominZhang/presto,sumitkgec/presto,cberner/presto,cawallin/presto,facebook/presto,albertocsm/presto,smartnews/presto,ocono-tech/presto,harunurhan/presto,shubham166/presto,jiekechoo/presto,prateek1306/presto,soz-fb/presto,dongjoon-hyun/presto,joy-yao/presto,gh351135612/presto,mode/presto,rockerbox/presto,dain/presto,zzhao0/presto,chrisunder/presto,soz-fb/presto,mbeitchman/presto,Teradata/presto,vermaravikant/presto,svstanev/presto,TeradataCenterForHadoop/bootcamp,kietly/presto,saidalaoui/presto,sopel39/presto,nsabharwal/presto,xiangel/presto,dain/presto,gcnonato/presto,pnowojski/presto,mcanthony/presto,Myrthan/presto,Myrthan/presto,mpilman/presto,dongjoon-hyun/presto,TeradataCenterForHadoop/bootcamp,nezihyigitbasi/presto,jiekechoo/presto,raghavsethi/presto,losipiuk/presto,miniway/presto,aramesh117/presto,albertocsm/presto,EvilMcJerkface/presto,mvp/presto,hulu/presto,propene/presto,smartpcr/presto,yu-yamada/presto,pnowojski/presto,TeradataCenterForHadoop/bootcamp,suyucs/presto,ocono-tech/presto,jf367/presto,mono-plane/presto,Zoomdata/presto,sumanth232/presto,prateek1306/presto,lingochamp/presto,dabaitu/presto,smartpcr/presto,avasilevskiy/presto,saidalaoui/presto,svstanev/presto,smartnews/presto,Myrthan/presto,Myrthan/presto,Teradata/presto,arhimondr/pres
to,EvilMcJerkface/presto,yu-yamada/presto,fipar/presto,raghavsethi/presto,ajoabraham/presto,wagnermarkd/presto,suyucs/presto,aramesh117/presto,sunchao/presto,Svjard/presto,takari/presto,takari/presto,martint/presto,DanielTing/presto,losipiuk/presto,prestodb/presto,saidalaoui/presto,wyukawa/presto,joy-yao/presto,chrisunder/presto,pwz3n0/presto,shubham166/presto,zzhao0/presto,ArturGajowy/presto,treasure-data/presto,sumanth232/presto,RobinUS2/presto,nezihyigitbasi/presto,denizdemir/presto,jekey/presto,svstanev/presto,cosinequanon/presto,nsabharwal/presto,gh351135612/presto,siddhartharay007/presto,fiedukow/presto,gh351135612/presto,sunchao/presto,mbeitchman/presto,treasure-data/presto,y-lan/presto,jacobgao/presto,lingochamp/presto,bloomberg/presto,kingland/presto,wrmsr/presto,dabaitu/presto,rockerbox/presto,saidalaoui/presto,CHINA-JD/presto,bloomberg/presto,avasilevskiy/presto,stewartpark/presto,zzhao0/presto,wrmsr/presto,cosinequanon/presto,jiekechoo/presto,smartpcr/presto,gcnonato/presto,arhimondr/presto,jxiang/presto,hulu/presto,elonazoulay/presto,erichwang/presto,ArturGajowy/presto,miniway/presto,Praveen2112/presto,takari/presto,springning/presto,mcanthony/presto,elonazoulay/presto,cberner/presto,ocono-tech/presto,miquelruiz/presto,lingochamp/presto,propene/presto,aglne/presto,mcanthony/presto,aramesh117/presto,tellproject/presto,miniway/presto,kietly/presto,ipros-team/presto,albertocsm/presto,kietly/presto,troels/nz-presto,hulu/presto,losipiuk/presto,toyama0919/presto,joy-yao/presto,fipar/presto,idemura/presto,kingland/presto,mattyb149/presto,zhenyuy-fb/presto,haozhun/presto,stewartpark/presto,shixuan-fan/presto,aramesh117/presto,nezihyigitbasi/presto,shixuan-fan/presto,nileema/presto,toyama0919/presto,cosinequanon/presto,kuzemchik/presto,jacobgao/presto,sunchao/presto,suyucs/presto,geraint0923/presto,TeradataCenterForHadoop/bootcamp,geraint0923/presto,yuananf/presto,toxeh/presto,jf367/presto,electrum/presto,totticarter/presto,DanielTing/presto,martint/presto,ebd2/
presto,raghavsethi/presto,jxiang/presto,aleph-zero/presto,XiaominZhang/presto,electrum/presto,Svjard/presto,Yaliang/presto,prestodb/presto,yu-yamada/presto,nileema/presto,aglne/presto,dain/presto,gh351135612/presto,shixuan-fan/presto,11xor6/presto,sumitkgec/presto,toxeh/presto,cberner/presto,prestodb/presto,pnowojski/presto,sunchao/presto,Svjard/presto,yuananf/presto,mpilman/presto,prateek1306/presto,zofuthan/presto,Yaliang/presto,kuzemchik/presto,kaschaeffer/presto,vermaravikant/presto,treasure-data/presto,fengshao0907/presto,youngwookim/presto,sopel39/presto,stewartpark/presto,TeradataCenterForHadoop/bootcamp,propene/presto,damiencarol/presto,toyama0919/presto,kined/presto,prateek1306/presto,mono-plane/presto,rockerbox/presto,idemura/presto,toyama0919/presto,EvilMcJerkface/presto,sumitkgec/presto,sopel39/presto,nezihyigitbasi/presto,ptkool/presto,dain/presto,Myrthan/presto,hgschmie/presto,troels/nz-presto,Nasdaq/presto,dabaitu/presto,albertocsm/presto,siddhartharay007/presto,fipar/presto,zzhao0/presto,kined/presto,Yaliang/presto,CHINA-JD/presto,smartnews/presto,fiedukow/presto,EvilMcJerkface/presto,wyukawa/presto,ArturGajowy/presto,suyucs/presto,elonazoulay/presto,prateek1306/presto,wagnermarkd/presto,jekey/presto,avasilevskiy/presto,erichwang/presto,zofuthan/presto,tomz/presto,DanielTing/presto,mpilman/presto,elonazoulay/presto,aramesh117/presto,wagnermarkd/presto,vermaravikant/presto,RobinUS2/presto,vermaravikant/presto,kaschaeffer/presto,mvp/presto,damiencarol/presto,ipros-team/presto,DanielTing/presto,harunurhan/presto,zofuthan/presto,ebd2/presto,Jimexist/presto,wangcan2014/presto,joy-yao/presto,mugglmenzel/presto,mandusm/presto,siddhartharay007/presto,miquelruiz/presto,zzhao0/presto,mbeitchman/presto,dain/presto,ebd2/presto,jacobgao/presto,wyukawa/presto,martint/presto,geraint0923/presto,y-lan/presto,stewartpark/presto,chrisunder/presto,dongjoon-hyun/presto,tomz/presto,fiedukow/presto,Praveen2112/presto,yuananf/presto,takari/presto,mode/presto,ptkool/presto,s
pringning/presto,shubham166/presto,idemura/presto,gcnonato/presto,kined/presto,haozhun/presto,kingland/presto,jiekechoo/presto,arhimondr/presto,tomz/presto,prestodb/presto,haitaoyao/presto,Yaliang/presto,haitaoyao/presto,sumitkgec/presto,Zoomdata/presto,saidalaoui/presto,ArturGajowy/presto,sumanth232/presto,jekey/presto,zjshen/presto,mugglmenzel/presto,troels/nz-presto,ebyhr/presto,zhenyuy-fb/presto,cberner/presto,ptkool/presto,nakajijiji/presto,harunurhan/presto,zjshen/presto,pwz3n0/presto,harunurhan/presto,sopel39/presto,vermaravikant/presto,martint/presto,Zoomdata/presto,XiaominZhang/presto,bloomberg/presto,nsabharwal/presto,zofuthan/presto,twitter-forks/presto,deciament/presto,mbeitchman/presto,mcanthony/presto,smartnews/presto,Zoomdata/presto,dongjoon-hyun/presto,hulu/presto,wyukawa/presto,joy-yao/presto,Jimexist/presto,soz-fb/presto,twitter-forks/presto,damiencarol/presto,mvp/presto,XiaominZhang/presto,arhimondr/presto,fengshao0907/presto,denizdemir/presto,jiekechoo/presto,xiangel/presto,miniway/presto,haozhun/presto,dabaitu/presto,denizdemir/presto,lingochamp/presto,springning/presto,sopel39/presto,soz-fb/presto,Jimexist/presto,fengshao0907/presto,haitaoyao/presto,tellproject/presto,ptkool/presto,cberner/presto,hgschmie/presto,treasure-data/presto,troels/nz-presto,gcnonato/presto,siddhartharay007/presto,tellproject/presto,twitter-forks/presto,nsabharwal/presto,treasure-data/presto,losipiuk/presto,totticarter/presto,tomz/presto,mvp/presto,electrum/presto,wangcan2014/presto,electrum/presto,Praveen2112/presto,Svjard/presto,nileema/presto,fengshao0907/presto,RobinUS2/presto,smartnews/presto,jiangyifangh/presto,ArturGajowy/presto,aleph-zero/presto,geraint0923/presto,jiangyifangh/presto,wyukawa/presto,deciament/presto,idemura/presto,kined/presto,ebyhr/presto,totticarter/presto,bloomberg/presto,Svjard/presto,sunchao/presto,pwz3n0/presto,rockerbox/presto,Nasdaq/presto,mode/presto,jf367/presto,aleph-zero/presto,hgschmie/presto,toyama0919/presto,mono-plane/presto,11xor
6/presto,aleph-zero/presto,fiedukow/presto,electrum/presto,jekey/presto,fiedukow/presto,kietly/presto,Teradata/presto,chrisunder/presto,kingland/presto,elonazoulay/presto,mpilman/presto,mpilman/presto,svstanev/presto,mode/presto,dabaitu/presto,aglne/presto,prestodb/presto,damiencarol/presto,xiangel/presto,mandusm/presto,wrmsr/presto,ipros-team/presto,wrmsr/presto,toxeh/presto,albertocsm/presto,geraint0923/presto,jiangyifangh/presto,toxeh/presto,mvp/presto,deciament/presto,tomz/presto,nileema/presto,harunurhan/presto,DanielTing/presto,Nasdaq/presto,avasilevskiy/presto,stewartpark/presto,yuananf/presto,suyucs/presto,sumanth232/presto,mattyb149/presto,pnowojski/presto,xiangel/presto,mugglmenzel/presto,y-lan/presto,y-lan/presto,miquelruiz/presto,chrisunder/presto,mode/presto,ipros-team/presto,jxiang/presto,zjshen/presto,ajoabraham/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.presto.operator.HttpPageBufferClient.ClientCallback; import com.facebook.presto.spi.Page; import com.google.common.base.Function; import com.google.common.base.Stopwatch; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableListMultimap; import io.airlift.http.client.HttpStatus; import io.airlift.http.client.Request; import io.airlift.http.client.Response; import io.airlift.http.client.testing.TestingHttpClient; import io.airlift.http.client.testing.TestingResponse; import io.airlift.testing.TestingTicker; import io.airlift.units.DataSize; import io.airlift.units.DataSize.Unit; import io.airlift.units.Duration; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import javax.annotation.Nullable; import java.net.URI; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static com.facebook.presto.PrestoMediaTypes.PRESTO_PAGES; import static com.facebook.presto.spi.StandardErrorCode.PAGE_TOO_LARGE; import static 
com.facebook.presto.spi.StandardErrorCode.PAGE_TRANSPORT_ERROR; import static com.facebook.presto.spi.StandardErrorCode.PAGE_TRANSPORT_TIMEOUT; import static com.facebook.presto.testing.TestingBlockEncodingManager.createTestingBlockEncodingManager; import static com.facebook.presto.util.Failures.WORKER_NODE_ERROR; import static com.google.common.net.HttpHeaders.CONTENT_TYPE; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.airlift.testing.Assertions.assertContains; import static io.airlift.testing.Assertions.assertInstanceOf; import static java.util.concurrent.Executors.newScheduledThreadPool; import static org.testng.Assert.assertEquals; public class TestHttpPageBufferClient { private ScheduledExecutorService executor; @BeforeClass public void setUp() { executor = newScheduledThreadPool(4, daemonThreadsNamed("test-%s")); } @AfterClass public void tearDown() { if (executor != null) { executor.shutdownNow(); executor = null; } } @Test public void testHappyPath() throws Exception { Page expectedPage = new Page(100); DataSize expectedMaxSize = new DataSize(11, Unit.MEGABYTE); MockExchangeRequestProcessor processor = new MockExchangeRequestProcessor(expectedMaxSize); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), expectedMaxSize, new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // fetch a page and verify processor.addPage(location, expectedPage); callback.resetStats(); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 1); assertPageEquals(expectedPage, callback.getPages().get(0)); 
assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertStatus(client, location, "queued", 1, 1, 1, 0, "not scheduled"); // fetch no data and verify callback.resetStats(); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertStatus(client, location, "queued", 1, 2, 2, 0, "not scheduled"); // fetch two more pages and verify processor.addPage(location, expectedPage); processor.addPage(location, expectedPage); callback.resetStats(); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 2); assertPageEquals(expectedPage, callback.getPages().get(0)); assertPageEquals(expectedPage, callback.getPages().get(1)); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 0); callback.resetStats(); assertStatus(client, location, "queued", 3, 3, 3, 0, "not scheduled"); // finish and verify callback.resetStats(); processor.setComplete(location); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 0); assertEquals(callback.getFinishedBuffers(), 1); assertEquals(callback.getFailedBuffers(), 0); assertStatus(client, location, "closed", 3, 4, 4, 0, "not scheduled"); } @Test public void testLifecycle() throws Exception { CyclicBarrier beforeRequest = new CyclicBarrier(2); CyclicBarrier afterRequest = new CyclicBarrier(2); StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest); processor.setResponse(new TestingResponse(HttpStatus.NO_CONTENT, ImmutableListMultimap.<String, String>of(), new byte[0])); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new 
TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); client.scheduleRequest(); beforeRequest.await(1, TimeUnit.SECONDS); assertStatus(client, location, "running", 0, 1, 0, 0, "PROCESSING_REQUEST"); assertEquals(client.isRunning(), true); afterRequest.await(1, TimeUnit.SECONDS); requestComplete.await(1, TimeUnit.SECONDS); assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled"); client.close(); assertStatus(client, location, "closed", 0, 1, 1, 1, "not scheduled"); } @Test public void testInvalidResponses() throws Exception { CyclicBarrier beforeRequest = new CyclicBarrier(1); CyclicBarrier afterRequest = new CyclicBarrier(1); StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // send not found response and verify response was ignored processor.setResponse(new TestingResponse(HttpStatus.NOT_FOUND, ImmutableListMultimap.of(CONTENT_TYPE, PRESTO_PAGES), new byte[0])); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); 
assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportErrorException.class); assertContains(callback.getFailure().getMessage(), "Expected response code to be 200, but was 404 Not Found"); assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled"); // send invalid content type response and verify response was ignored callback.resetStats(); processor.setResponse(new TestingResponse(HttpStatus.OK, ImmutableListMultimap.of(CONTENT_TYPE, "INVALID_TYPE"), new byte[0])); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportErrorException.class); assertContains(callback.getFailure().getMessage(), "Expected application/x-presto-pages response from server but got INVALID_TYPE"); assertStatus(client, location, "queued", 0, 2, 2, 2, "not scheduled"); // send unexpected content type response and verify response was ignored callback.resetStats(); processor.setResponse(new TestingResponse(HttpStatus.OK, ImmutableListMultimap.of(CONTENT_TYPE, "text/plain"), new byte[0])); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportErrorException.class); assertContains(callback.getFailure().getMessage(), "Expected application/x-presto-pages response from server but got text/plain"); assertStatus(client, location, "queued", 0, 3, 3, 3, "not scheduled"); // close client and verify client.close(); assertStatus(client, location, "closed", 0, 3, 3, 3, "not scheduled"); } @Test public 
void testCloseDuringPendingRequest() throws Exception { CyclicBarrier beforeRequest = new CyclicBarrier(2); CyclicBarrier afterRequest = new CyclicBarrier(2); StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest); processor.setResponse(new TestingResponse(HttpStatus.NO_CONTENT, ImmutableListMultimap.<String, String>of(), new byte[0])); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // send request client.scheduleRequest(); beforeRequest.await(1, TimeUnit.SECONDS); assertStatus(client, location, "running", 0, 1, 0, 0, "PROCESSING_REQUEST"); assertEquals(client.isRunning(), true); // request is pending, now close it client.close(); try { requestComplete.await(1, TimeUnit.SECONDS); } catch (BrokenBarrierException ignored) { } assertStatus(client, location, "closed", 0, 1, 1, 1, "not scheduled"); } @Test public void testExceptionFromResponseHandler() throws Exception { TestingTicker ticker = new TestingTicker(); AtomicReference<Duration> tickerIncrement = new AtomicReference<>(new Duration(0, TimeUnit.SECONDS)); Function<Request, Response> processor = (input) -> { Duration delta = tickerIncrement.get(); ticker.increment(delta.toMillis(), TimeUnit.MILLISECONDS); throw new RuntimeException("Foo"); }; CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, 
executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted(ticker)); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // request processor will throw exception, verify the request is marked a completed // this starts the error stopwatch client.scheduleRequest(); requestComplete.await(10, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 0); assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled"); // advance time forward, but not enough to fail the client tickerIncrement.set(new Duration(30, TimeUnit.SECONDS)); // verify that the client has not failed client.scheduleRequest(); requestComplete.await(10, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 2); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 0); assertStatus(client, location, "queued", 0, 2, 2, 2, "not scheduled"); // advance time forward beyond the minimum error duration tickerIncrement.set(new Duration(31, TimeUnit.SECONDS)); // verify that the client has failed client.scheduleRequest(); requestComplete.await(10, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 3); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportTimeoutException.class); assertContains(callback.getFailure().getMessage(), WORKER_NODE_ERROR + " (http://localhost:8080/0 - requests failed for 61.00s)"); assertStatus(client, location, "queued", 0, 3, 3, 3, "not scheduled"); } @Test public void testErrorCodes() throws Exception { assertEquals(new PageTooLargeException().getErrorCode(), 
PAGE_TOO_LARGE.toErrorCode()); assertEquals(new PageTransportErrorException("").getErrorCode(), PAGE_TRANSPORT_ERROR.toErrorCode()); assertEquals(new PageTransportTimeoutException("", null).getErrorCode(), PAGE_TRANSPORT_TIMEOUT.toErrorCode()); } private static void assertStatus( HttpPageBufferClient client, URI location, String status, int pagesReceived, int requestsScheduled, int requestsCompleted, int requestsFailed, String httpRequestState) { PageBufferClientStatus actualStatus = client.getStatus(); assertEquals(actualStatus.getUri(), location); assertEquals(actualStatus.getState(), status, "status"); assertEquals(actualStatus.getPagesReceived(), pagesReceived, "pagesReceived"); assertEquals(actualStatus.getRequestsScheduled(), requestsScheduled, "requestsScheduled"); assertEquals(actualStatus.getRequestsCompleted(), requestsCompleted, "requestsCompleted"); assertEquals(actualStatus.getRequestsFailed(), requestsFailed, "requestsFailed"); assertEquals(actualStatus.getHttpRequestState(), httpRequestState, "httpRequestState"); } private static void assertPageEquals(Page expectedPage, Page actualPage) { assertEquals(actualPage.getPositionCount(), expectedPage.getPositionCount()); assertEquals(actualPage.getChannelCount(), expectedPage.getChannelCount()); } private static class TestingClientCallback implements ClientCallback { private final CyclicBarrier done; private final List<Page> pages = Collections.synchronizedList(new ArrayList<>()); private final AtomicInteger completedRequests = new AtomicInteger(); private final AtomicInteger finishedBuffers = new AtomicInteger(); private final AtomicInteger failedBuffers = new AtomicInteger(); private final AtomicReference<Throwable> failure = new AtomicReference<>(); public TestingClientCallback(CyclicBarrier done) { this.done = done; } public List<Page> getPages() { return pages; } private int getCompletedRequests() { return completedRequests.get(); } private int getFinishedBuffers() { return finishedBuffers.get(); } 
public int getFailedBuffers() { return failedBuffers.get(); } public Throwable getFailure() { return failure.get(); } @Override public void addPage(HttpPageBufferClient client, Page page) { pages.add(page); } @Override public void requestComplete(HttpPageBufferClient client) { completedRequests.getAndIncrement(); try { done.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | TimeoutException e) { throw Throwables.propagate(e); } } @Override public void clientFinished(HttpPageBufferClient client) { finishedBuffers.getAndIncrement(); try { done.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | TimeoutException e) { throw Throwables.propagate(e); } } @Override public void clientFailed(HttpPageBufferClient client, Throwable cause) { failedBuffers.getAndIncrement(); failure.compareAndSet(null, cause); // requestComplete() will be called after this } public void resetStats() { pages.clear(); completedRequests.set(0); finishedBuffers.set(0); failedBuffers.set(0); failure.set(null); } } private static class StaticRequestProcessor implements Function<Request, Response> { private final AtomicReference<Response> response = new AtomicReference<>(); private final CyclicBarrier beforeRequest; private final CyclicBarrier afterRequest; private StaticRequestProcessor(CyclicBarrier beforeRequest, CyclicBarrier afterRequest) { this.beforeRequest = beforeRequest; this.afterRequest = afterRequest; } private void setResponse(Response response) { this.response.set(response); } @SuppressWarnings("ThrowFromFinallyBlock") @Override public Response apply(@Nullable Request request) { try { beforeRequest.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | 
TimeoutException e) { throw Throwables.propagate(e); } try { return response.get(); } finally { try { afterRequest.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | TimeoutException e) { throw Throwables.propagate(e); } } } } }
presto-main/src/test/java/com/facebook/presto/operator/TestHttpPageBufferClient.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.presto.operator.HttpPageBufferClient.ClientCallback; import com.facebook.presto.spi.Page; import com.google.common.base.Function; import com.google.common.base.Stopwatch; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableListMultimap; import io.airlift.http.client.HttpStatus; import io.airlift.http.client.Request; import io.airlift.http.client.Response; import io.airlift.http.client.testing.TestingHttpClient; import io.airlift.http.client.testing.TestingResponse; import io.airlift.testing.TestingTicker; import io.airlift.units.DataSize; import io.airlift.units.DataSize.Unit; import io.airlift.units.Duration; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import javax.annotation.Nullable; import java.net.URI; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static com.facebook.presto.PrestoMediaTypes.PRESTO_PAGES; import static com.facebook.presto.spi.StandardErrorCode.PAGE_TOO_LARGE; import static 
com.facebook.presto.spi.StandardErrorCode.PAGE_TRANSPORT_ERROR; import static com.facebook.presto.spi.StandardErrorCode.PAGE_TRANSPORT_TIMEOUT; import static com.facebook.presto.testing.TestingBlockEncodingManager.createTestingBlockEncodingManager; import static com.facebook.presto.util.Failures.WORKER_NODE_ERROR; import static com.google.common.net.HttpHeaders.CONTENT_TYPE; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.airlift.testing.Assertions.assertContains; import static io.airlift.testing.Assertions.assertInstanceOf; import static java.util.concurrent.Executors.newScheduledThreadPool; import static org.testng.Assert.assertEquals; public class TestHttpPageBufferClient { private ScheduledExecutorService executor; @BeforeClass public void setUp() { executor = newScheduledThreadPool(4, daemonThreadsNamed("test-%s")); } @AfterClass public void tearDown() { if (executor != null) { executor.shutdownNow(); executor = null; } } @Test public void testHappyPath() throws Exception { Page expectedPage = new Page(100); DataSize expectedMaxSize = new DataSize(11, Unit.MEGABYTE); MockExchangeRequestProcessor processor = new MockExchangeRequestProcessor(expectedMaxSize); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), expectedMaxSize, new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // fetch a page and verify processor.addPage(location, expectedPage); callback.resetStats(); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 1); assertPageEquals(expectedPage, callback.getPages().get(0)); 
assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertStatus(client, location, "queued", 1, 1, 1, 0, "not scheduled"); // fetch no data and verify callback.resetStats(); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertStatus(client, location, "queued", 1, 2, 2, 0, "not scheduled"); // fetch two more pages and verify processor.addPage(location, expectedPage); processor.addPage(location, expectedPage); callback.resetStats(); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 2); assertPageEquals(expectedPage, callback.getPages().get(0)); assertPageEquals(expectedPage, callback.getPages().get(1)); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 0); callback.resetStats(); assertStatus(client, location, "queued", 3, 3, 3, 0, "not scheduled"); // finish and verify callback.resetStats(); processor.setComplete(location); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 0); assertEquals(callback.getFinishedBuffers(), 1); assertEquals(callback.getFailedBuffers(), 0); assertStatus(client, location, "closed", 3, 4, 4, 0, "not scheduled"); } @Test public void testLifecycle() throws Exception { CyclicBarrier beforeRequest = new CyclicBarrier(2); CyclicBarrier afterRequest = new CyclicBarrier(2); StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest); processor.setResponse(new TestingResponse(HttpStatus.NO_CONTENT, ImmutableListMultimap.<String, String>of(), new byte[0])); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new 
TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); client.scheduleRequest(); beforeRequest.await(1, TimeUnit.SECONDS); assertStatus(client, location, "running", 0, 1, 0, 0, "PROCESSING_REQUEST"); assertEquals(client.isRunning(), true); afterRequest.await(1, TimeUnit.SECONDS); requestComplete.await(1, TimeUnit.SECONDS); assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled"); client.close(); assertStatus(client, location, "closed", 0, 1, 1, 1, "not scheduled"); } @Test public void testInvalidResponses() throws Exception { CyclicBarrier beforeRequest = new CyclicBarrier(1); CyclicBarrier afterRequest = new CyclicBarrier(1); StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // send not found response and verify response was ignored processor.setResponse(new TestingResponse(HttpStatus.NOT_FOUND, ImmutableListMultimap.of(CONTENT_TYPE, PRESTO_PAGES), new byte[0])); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); 
assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportErrorException.class); assertContains(callback.getFailure().getMessage(), "Expected response code to be 200, but was 404 Not Found"); assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled"); // send invalid content type response and verify response was ignored callback.resetStats(); processor.setResponse(new TestingResponse(HttpStatus.OK, ImmutableListMultimap.of(CONTENT_TYPE, "INVALID_TYPE"), new byte[0])); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportErrorException.class); assertContains(callback.getFailure().getMessage(), "Expected application/x-presto-pages response from server but got INVALID_TYPE"); assertStatus(client, location, "queued", 0, 2, 2, 2, "not scheduled"); // send unexpected content type response and verify response was ignored callback.resetStats(); processor.setResponse(new TestingResponse(HttpStatus.OK, ImmutableListMultimap.of(CONTENT_TYPE, "text/plain"), new byte[0])); client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportErrorException.class); assertContains(callback.getFailure().getMessage(), "Expected application/x-presto-pages response from server but got text/plain"); assertStatus(client, location, "queued", 0, 3, 3, 3, "not scheduled"); // close client and verify client.close(); assertStatus(client, location, "closed", 0, 3, 3, 3, "not scheduled"); } @Test public 
void testCloseDuringPendingRequest() throws Exception { CyclicBarrier beforeRequest = new CyclicBarrier(2); CyclicBarrier afterRequest = new CyclicBarrier(2); StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest); processor.setResponse(new TestingResponse(HttpStatus.NO_CONTENT, ImmutableListMultimap.<String, String>of(), new byte[0])); CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted()); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // send request client.scheduleRequest(); beforeRequest.await(1, TimeUnit.SECONDS); assertStatus(client, location, "running", 0, 1, 0, 0, "PROCESSING_REQUEST"); assertEquals(client.isRunning(), true); // request is pending, now close it client.close(); try { requestComplete.await(1, TimeUnit.SECONDS); } catch (BrokenBarrierException ignored) { } assertStatus(client, location, "closed", 0, 1, 1, 1, "not scheduled"); } @Test public void testExceptionFromResponseHandler() throws Exception { TestingTicker ticker = new TestingTicker(); AtomicReference<Duration> tickerIncrement = new AtomicReference<>(new Duration(0, TimeUnit.SECONDS)); Function<Request, Response> processor = (input) -> { Duration delta = tickerIncrement.get(); ticker.increment(delta.toMillis(), TimeUnit.MILLISECONDS); throw new RuntimeException("Foo"); }; CyclicBarrier requestComplete = new CyclicBarrier(2); TestingClientCallback callback = new TestingClientCallback(requestComplete); URI location = URI.create("http://localhost:8080"); HttpPageBufferClient client = new HttpPageBufferClient(new TestingHttpClient(processor, 
executor), new DataSize(10, Unit.MEGABYTE), new Duration(1, TimeUnit.MINUTES), location, callback, createTestingBlockEncodingManager(), executor, Stopwatch.createUnstarted(ticker)); assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled"); // request processor will throw exception, verify the request is marked a completed // this starts the error stopwatch client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 1); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 0); assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled"); // advance time forward, but not enough to fail the client tickerIncrement.set(new Duration(30, TimeUnit.SECONDS)); // verify that the client has not failed client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 2); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 0); assertStatus(client, location, "queued", 0, 2, 2, 2, "not scheduled"); // advance time forward beyond the minimum error duration tickerIncrement.set(new Duration(31, TimeUnit.SECONDS)); // verify that the client has failed client.scheduleRequest(); requestComplete.await(1, TimeUnit.SECONDS); assertEquals(callback.getPages().size(), 0); assertEquals(callback.getCompletedRequests(), 3); assertEquals(callback.getFinishedBuffers(), 0); assertEquals(callback.getFailedBuffers(), 1); assertInstanceOf(callback.getFailure(), PageTransportTimeoutException.class); assertContains(callback.getFailure().getMessage(), WORKER_NODE_ERROR + " (http://localhost:8080/0 - requests failed for 61.00s)"); assertStatus(client, location, "queued", 0, 3, 3, 3, "not scheduled"); } @Test public void testErrorCodes() throws Exception { assertEquals(new PageTooLargeException().getErrorCode(), 
PAGE_TOO_LARGE.toErrorCode()); assertEquals(new PageTransportErrorException("").getErrorCode(), PAGE_TRANSPORT_ERROR.toErrorCode()); assertEquals(new PageTransportTimeoutException("", null).getErrorCode(), PAGE_TRANSPORT_TIMEOUT.toErrorCode()); } private static void assertStatus( HttpPageBufferClient client, URI location, String status, int pagesReceived, int requestsScheduled, int requestsCompleted, int requestsFailed, String httpRequestState) { PageBufferClientStatus actualStatus = client.getStatus(); assertEquals(actualStatus.getUri(), location); assertEquals(actualStatus.getState(), status, "status"); assertEquals(actualStatus.getPagesReceived(), pagesReceived, "pagesReceived"); assertEquals(actualStatus.getRequestsScheduled(), requestsScheduled, "requestsScheduled"); assertEquals(actualStatus.getRequestsCompleted(), requestsCompleted, "requestsCompleted"); assertEquals(actualStatus.getRequestsFailed(), requestsFailed, "requestsFailed"); assertEquals(actualStatus.getHttpRequestState(), httpRequestState, "httpRequestState"); } private static void assertPageEquals(Page expectedPage, Page actualPage) { assertEquals(actualPage.getPositionCount(), expectedPage.getPositionCount()); assertEquals(actualPage.getChannelCount(), expectedPage.getChannelCount()); } private static class TestingClientCallback implements ClientCallback { private final CyclicBarrier done; private final List<Page> pages = Collections.synchronizedList(new ArrayList<>()); private final AtomicInteger completedRequests = new AtomicInteger(); private final AtomicInteger finishedBuffers = new AtomicInteger(); private final AtomicInteger failedBuffers = new AtomicInteger(); private final AtomicReference<Throwable> failure = new AtomicReference<>(); public TestingClientCallback(CyclicBarrier done) { this.done = done; } public List<Page> getPages() { return pages; } private int getCompletedRequests() { return completedRequests.get(); } private int getFinishedBuffers() { return finishedBuffers.get(); } 
public int getFailedBuffers() { return failedBuffers.get(); } public Throwable getFailure() { return failure.get(); } @Override public void addPage(HttpPageBufferClient client, Page page) { pages.add(page); } @Override public void requestComplete(HttpPageBufferClient client) { completedRequests.getAndIncrement(); try { done.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | TimeoutException e) { throw Throwables.propagate(e); } } @Override public void clientFinished(HttpPageBufferClient client) { finishedBuffers.getAndIncrement(); try { done.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | TimeoutException e) { throw Throwables.propagate(e); } } @Override public void clientFailed(HttpPageBufferClient client, Throwable cause) { failedBuffers.getAndIncrement(); failure.compareAndSet(null, cause); // requestComplete() will be called after this } public void resetStats() { pages.clear(); completedRequests.set(0); finishedBuffers.set(0); failedBuffers.set(0); failure.set(null); } } private static class StaticRequestProcessor implements Function<Request, Response> { private final AtomicReference<Response> response = new AtomicReference<>(); private final CyclicBarrier beforeRequest; private final CyclicBarrier afterRequest; private StaticRequestProcessor(CyclicBarrier beforeRequest, CyclicBarrier afterRequest) { this.beforeRequest = beforeRequest; this.afterRequest = afterRequest; } private void setResponse(Response response) { this.response.set(response); } @SuppressWarnings("ThrowFromFinallyBlock") @Override public Response apply(@Nullable Request request) { try { beforeRequest.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | 
TimeoutException e) { throw Throwables.propagate(e); } try { return response.get(); } finally { try { afterRequest.await(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } catch (BrokenBarrierException | TimeoutException e) { throw Throwables.propagate(e); } } } } }
Increase timeout in TestHttpPageBufferClient This is failing intermittently on Travis
presto-main/src/test/java/com/facebook/presto/operator/TestHttpPageBufferClient.java
Increase timeout in TestHttpPageBufferClient
Java
apache-2.0
f7430033e69d2ec5e0694e18e2819a03b42f1ae6
0
sangramjadhav/testrs
2288b5d4-2ece-11e5-905b-74de2bd44bed
hello.java
228824de-2ece-11e5-905b-74de2bd44bed
2288b5d4-2ece-11e5-905b-74de2bd44bed
hello.java
2288b5d4-2ece-11e5-905b-74de2bd44bed
Java
apache-2.0
0bfd3b02b468caa26260388ea4b989043ad6b8af
0
cushon/error-prone,cushon/error-prone,google/error-prone,cushon/error-prone,google/error-prone,cushon/error-prone
/* * Copyright 2018 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns.time; import static com.google.errorprone.BugPattern.ProvidesFix.REQUIRES_HUMAN_ATTENTION; import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; import com.google.common.collect.ImmutableSet; import com.google.errorprone.BugPattern; import com.google.errorprone.VisitorState; import com.google.errorprone.bugpatterns.BugChecker; import com.google.errorprone.bugpatterns.BugChecker.MethodInvocationTreeMatcher; import com.google.errorprone.fixes.SuggestedFix; import com.google.errorprone.matchers.Description; import com.google.errorprone.matchers.Matcher; import com.google.errorprone.matchers.Matchers; import com.google.errorprone.util.ASTHelpers; import com.sun.source.tree.ExpressionTree; import com.sun.source.tree.MethodInvocationTree; import com.sun.tools.javac.code.Symbol.MethodSymbol; import com.sun.tools.javac.util.Name; import com.google.errorprone.fixes.SuggestedFixes; /** Check for calls to {@code java.time} APIs that silently use the default system time-zone. */ @BugPattern( name = "JavaTimeDefaultTimeZone", summary = "java.time APIs that silently use the default system time-zone are not allowed.", explanation = "Using APIs that silently use the default system time-zone is dangerous. " + "The default system time-zone can vary from machine to machine or JVM to JVM. " + "You must choose an explicit ZoneId." 
, severity = WARNING, providesFix = REQUIRES_HUMAN_ATTENTION) public final class JavaTimeDefaultTimeZone extends BugChecker implements MethodInvocationTreeMatcher { private static final ImmutableSet<String> NOW_STATIC = ImmutableSet.of( "java.time.LocalDate", "java.time.LocalDateTime", "java.time.LocalTime", "java.time.MonthDay", "java.time.OffsetDateTime", "java.time.OffsetTime", "java.time.Year", "java.time.YearMonth", "java.time.ZonedDateTime", "java.time.chrono.JapaneseDate", "java.time.chrono.MinguoDate", "java.time.chrono.HijrahDate", "java.time.chrono.ThaiBuddhistDate"); private static final ImmutableSet<String> DATE_NOW_INSTANCE = ImmutableSet.of( "java.time.chrono.Chronology", "java.time.chrono.HijrahChronology", "java.time.chrono.IsoChronology", "java.time.chrono.JapaneseChronology", "java.time.chrono.MinguoChronology", "java.time.chrono.ThaiBuddhistChronology"); private static final Matcher<ExpressionTree> CLOCK_MATCHER = Matchers.staticMethod() .onClass("java.time.Clock") .named("systemDefaultZone") .withParameters(); private static final Matcher<ExpressionTree> IN_JAVA_TIME = Matchers.packageStartsWith("java.time"); private static boolean matches(MethodInvocationTree tree) { if (!tree.getArguments().isEmpty()) { return false; } MethodSymbol symbol = ASTHelpers.getSymbol(tree); if (symbol == null) { return false; } Name methodName = symbol.getSimpleName(); if (methodName.contentEquals("now")) { return symbol.isStatic() && NOW_STATIC.contains(symbol.owner.getQualifiedName().toString()); } if (methodName.contentEquals("dateNow")) { return !symbol.isStatic() && DATE_NOW_INSTANCE.contains(symbol.owner.getQualifiedName().toString()); } if (methodName.contentEquals("systemDefaultZone")) { return symbol.isStatic() && symbol.owner.getQualifiedName().contentEquals("java.time.Clock"); } return false; } @Override public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) { if (!matches(tree) || IN_JAVA_TIME.matches(tree, state)) { 
return Description.NO_MATCH; } String idealReplacementCode = "ZoneId.of(\"America/Los_Angeles\")"; SuggestedFix.Builder fixBuilder = SuggestedFix.builder(); String zoneIdName = SuggestedFixes.qualifyType(state, fixBuilder, "java.time.ZoneId"); String replacementCode = zoneIdName + ".systemDefault()"; // The method could be statically imported and have no receiver: if so, just swap out the whole // tree as opposed to surgically replacing the post-receiver part.. ExpressionTree receiver = ASTHelpers.getReceiver(tree); // we special case Clock because the replacement isn't just an overload, but a new API entirely boolean systemDefaultZoneClockMethod = CLOCK_MATCHER.matches(tree, state); String replacementMethod = systemDefaultZoneClockMethod ? "system" : ASTHelpers.getSymbol(tree).name.toString(); if (receiver != null) { fixBuilder.replace( state.getEndPosition(receiver), state.getEndPosition(tree), "." + replacementMethod + "(" + replacementCode + ")"); } else { if (systemDefaultZoneClockMethod) { fixBuilder.addStaticImport("java.time.Clock.systemDefaultZone"); } fixBuilder.replace(tree, replacementMethod + "(" + replacementCode + ")"); } return buildDescription(tree) .setMessage( String.format( "%s.%s is not allowed because it silently uses the system default time-zone. You " + "must pass an explicit time-zone (e.g., %s) to this method.", ASTHelpers.getSymbol(tree).owner.getSimpleName(), ASTHelpers.getSymbol(tree), idealReplacementCode)) .addFix(fixBuilder.build()) .build(); } }
core/src/main/java/com/google/errorprone/bugpatterns/time/JavaTimeDefaultTimeZone.java
/* * Copyright 2018 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns.time; import static com.google.errorprone.BugPattern.ProvidesFix.REQUIRES_HUMAN_ATTENTION; import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; import com.google.common.collect.ImmutableSet; import com.google.errorprone.BugPattern; import com.google.errorprone.VisitorState; import com.google.errorprone.bugpatterns.BugChecker; import com.google.errorprone.bugpatterns.BugChecker.MethodInvocationTreeMatcher; import com.google.errorprone.fixes.SuggestedFix; import com.google.errorprone.matchers.Description; import com.google.errorprone.matchers.Matcher; import com.google.errorprone.matchers.Matchers; import com.google.errorprone.util.ASTHelpers; import com.sun.source.tree.ExpressionTree; import com.sun.source.tree.MethodInvocationTree; import com.sun.tools.javac.code.Symbol.MethodSymbol; import com.sun.tools.javac.util.Name; import com.google.errorprone.fixes.SuggestedFixes; /** Check for calls to {@code java.time} APIs that silently use the default system time-zone. */ @BugPattern( name = "JavaTimeDefaultTimeZone", summary = "java.time APIs that silently use the default system time-zone are not allowed.", explanation = "Using APIs that silently use the default system time-zone is dangerous. " + "The default system time-zone can vary from machine to machine or JVM to JVM. " + "You must choose an explicit ZoneId." 
, severity = WARNING, providesFix = REQUIRES_HUMAN_ATTENTION) public final class JavaTimeDefaultTimeZone extends BugChecker implements MethodInvocationTreeMatcher { private static final ImmutableSet<String> NOW_STATIC = ImmutableSet.of( "java.time.LocalDate", "java.time.LocalDateTime", "java.time.LocalTime", "java.time.MonthDay", "java.time.OffsetDateTime", "java.time.OffsetTime", "java.time.Year", "java.time.YearMonth", "java.time.ZonedDateTime", "java.time.chrono.JapaneseDate", "java.time.chrono.MinguoDate", "java.time.chrono.HijrahDate", "java.time.chrono.ThaiBuddhistDate"); private static final ImmutableSet<String> DATE_NOW_INSTANCE = ImmutableSet.of( "java.time.chrono.Chronology", "java.time.chrono.HijrahChronology", "java.time.chrono.IsoChronology", "java.time.chrono.JapaneseChronology", "java.time.chrono.MinguoChronology", "java.time.chrono.ThaiBuddhistChronology"); private static final Matcher<ExpressionTree> CLOCK_MATCHER = Matchers.staticMethod() .onClass("java.time.Clock") .named("systemDefaultZone") .withParameters(); private static final Matcher<ExpressionTree> IN_JAVA_TIME = Matchers.packageStartsWith("java.time"); private static boolean matches(MethodInvocationTree tree, VisitorState state) { if (!tree.getArguments().isEmpty()) { return false; } MethodSymbol symbol = ASTHelpers.getSymbol(tree); if (symbol == null) { return false; } Name methodName = symbol.getSimpleName(); if (methodName.contentEquals("now")) { return symbol.isStatic() && NOW_STATIC.contains(symbol.owner.getQualifiedName().toString()); } if (methodName.contentEquals("dateNow")) { return !symbol.isStatic() && DATE_NOW_INSTANCE.contains(symbol.owner.getQualifiedName().toString()); } if (methodName.contentEquals("systemDefaultZone")) { return symbol.isStatic() && symbol.owner.getQualifiedName().contentEquals("java.time.Clock"); } return false; } @Override public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) { if (!matches(tree, state) || 
IN_JAVA_TIME.matches(tree, state)) { return Description.NO_MATCH; } String idealReplacementCode = "ZoneId.of(\"America/Los_Angeles\")"; SuggestedFix.Builder fixBuilder = SuggestedFix.builder(); String zoneIdName = SuggestedFixes.qualifyType(state, fixBuilder, "java.time.ZoneId"); String replacementCode = zoneIdName + ".systemDefault()"; // The method could be statically imported and have no receiver: if so, just swap out the whole // tree as opposed to surgically replacing the post-receiver part.. ExpressionTree receiver = ASTHelpers.getReceiver(tree); // we special case Clock because the replacement isn't just an overload, but a new API entirely boolean systemDefaultZoneClockMethod = CLOCK_MATCHER.matches(tree, state); String replacementMethod = systemDefaultZoneClockMethod ? "system" : ASTHelpers.getSymbol(tree).name.toString(); if (receiver != null) { fixBuilder.replace( state.getEndPosition(receiver), state.getEndPosition(tree), "." + replacementMethod + "(" + replacementCode + ")"); } else { if (systemDefaultZoneClockMethod) { fixBuilder.addStaticImport("java.time.Clock.systemDefaultZone"); } fixBuilder.replace(tree, replacementMethod + "(" + replacementCode + ")"); } return buildDescription(tree) .setMessage( String.format( "%s.%s is not allowed because it silently uses the system default time-zone. You " + "must pass an explicit time-zone (e.g., %s) to this method.", ASTHelpers.getSymbol(tree).owner.getSimpleName(), ASTHelpers.getSymbol(tree), idealReplacementCode)) .addFix(fixBuilder.build()) .build(); } }
Remove an unused parameter ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=251911580
core/src/main/java/com/google/errorprone/bugpatterns/time/JavaTimeDefaultTimeZone.java
Remove an unused parameter
Java
apache-2.0
fed55b219cee5990af5a650a51c7ab8a710e7a82
0
apache/commons-functor,apache/commons-functor
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.functor.core.composite; import java.io.Serializable; import org.apache.commons.functor.BinaryFunction; import org.apache.commons.functor.UnaryFunction; /** * A BinaryFunction whose result is then run through a UnaryFunction. * @version $Revision$ $Date$ * @author Matt Benson */ public class TransformedBinaryFunction<L, R, T> implements BinaryFunction<L, R, T>, Serializable { /** * serialVersionUID declaration. */ private static final long serialVersionUID = 3312781645741807814L; /** * Type-remembering helper * @param <X> */ private static final class Helper<X, L, R, T> implements BinaryFunction<L, R, T>, Serializable { /** * serialVersionUID declaration. */ private static final long serialVersionUID = 8141488776884860650L; private BinaryFunction<? super L, ? super R, ? extends X> preceding; private UnaryFunction<? super X, ? extends T> following; /** * Create a new Helper. * @param preceding BinaryFunction * @param following UnaryFunction */ private Helper(BinaryFunction<? super L, ? super R, ? extends X> preceding, UnaryFunction<? super X, ? 
extends T> following) { this.preceding = preceding; this.following = following; } /** * {@inheritDoc} */ public T evaluate(L left, R right) { return following.evaluate(preceding.evaluate(left, right)); } } private final Helper<?, L, R, T> helper; /** * Create a new TransformedBinaryFunction. * @param <X> * @param preceding BinaryFunction * @param following UnaryFunction */ public <X> TransformedBinaryFunction(BinaryFunction<? super L, ? super R, ? extends X> preceding, UnaryFunction<? super X, ? extends T> following) { this.helper = new Helper<X, L, R, T>(preceding, following); } /** * {@inheritDoc} */ public final T evaluate(L left, R right) { return helper.evaluate(left, right); } /** * {@inheritDoc} */ @Override public final boolean equals(Object obj) { return obj == this || obj instanceof TransformedBinaryFunction<?, ?, ?> && equals((TransformedBinaryFunction<?, ?, ?>) obj); } /** * Learn whether another TransformedBinaryFunction is equal to <code>this</code>. * @param that instance to test * @return whether equal */ public final boolean equals(TransformedBinaryFunction<?, ?, ?> that) { return that != null && that.helper.preceding.equals(this.helper.preceding) && that.helper.following.equals(this.helper.following); } /** * {@inheritDoc} */ @Override public final int hashCode() { int result = "TransformedBinaryFunction".hashCode(); result <<= 2; result |= helper.following.hashCode(); result <<= 2; result |= helper.preceding.hashCode(); return result; } /** * {@inheritDoc} */ @Override public String toString() { return "TransformedBinaryFunction<" + helper.preceding + "; " + helper.following + ">"; } }
src/main/java/org/apache/commons/functor/core/composite/TransformedBinaryFunction.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.functor.core.composite; import java.io.Serializable; import org.apache.commons.functor.BinaryFunction; import org.apache.commons.functor.UnaryFunction; /** * A BinaryFunction whose result is then run through a UnaryFunction. * @version $Revision$ $Date$ * @author Matt Benson */ public class TransformedBinaryFunction<L, R, T> implements BinaryFunction<L, R, T>, Serializable { /** * serialVersionUID declaration. */ private static final long serialVersionUID = 3312781645741807814L; /** * Type-remembering helper * @param <X> */ private static final class Helper<X, L, R, T> implements BinaryFunction<L, R, T>, Serializable { /** * serialVersionUID declaration. */ private static final long serialVersionUID = 8141488776884860650L; private BinaryFunction<? super L, ? super R, ? extends X> preceding; private UnaryFunction<? super X, ? extends T> following; /** * Create a new Helper. * @param preceding BinaryFunction * @param following UnaryFunction */ private Helper(BinaryFunction<? super L, ? super R, ? extends X> preceding, UnaryFunction<? super X, ? 
extends T> following) { this.preceding = preceding; this.following = following; } /** * {@inheritDoc} */ public T evaluate(L left, R right) { return following.evaluate(preceding.evaluate(left, right)); } } private final Helper<?, L, R, T> helper; /** * Create a new TransformedBinaryFunction. * @param <X> * @param preceding BinaryFunction * @param following UnaryFunction */ public <X> TransformedBinaryFunction(BinaryFunction<? super L, ? super R, ? extends X> preceding, UnaryFunction<? super X, ? extends T> following) { this.helper = new Helper<X, L, R, T>(preceding, following); } /** * {@inheritDoc} */ public final T evaluate(L left, R right) { return helper.evaluate(left, right); } /** * {@inheritDoc} */ @Override public final boolean equals(Object obj) { return obj == this || obj instanceof TransformedBinaryFunction<?, ?, ?> && equals((TransformedBinaryFunction<?, ?, ?>) obj); } /** * Learn whether another TransformedBinaryFunction is equal to <code>this</code>. * @param that instance to test * @return whether equal */ public final boolean equals(TransformedBinaryFunction<?, ?, ?> that) { return that != null && that.helper.preceding.equals(this.helper.preceding) && that.helper.following.equals(this.helper.following); } /** * {@inheritDoc} */ @Override public int hashCode() { int result = "TransformedBinaryFunction".hashCode(); result <<= 2; result |= helper.following.hashCode(); result <<= 2; result |= helper.preceding.hashCode(); return result; } /** * {@inheritDoc} */ @Override public String toString() { return "TransformedBinaryFunction<" + helper.preceding + "; " + helper.following + ">"; } }
fixed checkstyle violation: Method 'hashCode' is not designed for extension - needs to be abstract, final or empty git-svn-id: 7e88b6689c675bf212a536fca2a3ed2c50d982c1@1170797 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/commons/functor/core/composite/TransformedBinaryFunction.java
fixed checkstyle violation: Method 'hashCode' is not designed for extension - needs to be abstract, final or empty
Java
apache-2.0
b6ba9be6a8a8cc373d82e2f2d34c2bf2aad69015
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.ide.actions.searcheverywhere; import com.google.common.collect.Lists; import com.intellij.find.findUsages.PsiElement2UsageTargetAdapter; import com.intellij.icons.AllIcons; import com.intellij.ide.IdeBundle; import com.intellij.ide.SearchTopHitProvider; import com.intellij.ide.actions.BigPopupUI; import com.intellij.ide.actions.SearchEverywhereClassifier; import com.intellij.ide.actions.bigPopup.ShowFilterAction; import com.intellij.ide.actions.searcheverywhere.statistics.SearchEverywhereUsageTriggerCollector; import com.intellij.ide.actions.searcheverywhere.statistics.SearchFieldStatisticsCollector; import com.intellij.ide.util.ElementsChooser; import com.intellij.ide.util.gotoByName.GotoActionModel; import com.intellij.ide.util.gotoByName.QuickSearchComponent; import com.intellij.ide.util.gotoByName.SearchEverywhereConfiguration; import com.intellij.internal.statistic.eventLog.FeatureUsageData; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.AnActionListener; import com.intellij.openapi.actionSystem.impl.ActionMenu; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.progress.util.ProgressIndicatorBase; import com.intellij.openapi.progress.util.ProgressWindow; import com.intellij.openapi.progress.util.TooManyUsagesStatus; import com.intellij.openapi.project.DumbAwareAction; import 
com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.JBPopup; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Pair; import com.intellij.openapi.wm.ToolWindowId; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.psi.PsiElement; import com.intellij.psi.codeStyle.MinusculeMatcher; import com.intellij.psi.codeStyle.NameUtil; import com.intellij.psi.util.PsiUtilCore; import com.intellij.ui.*; import com.intellij.ui.components.JBLabel; import com.intellij.ui.components.JBList; import com.intellij.ui.components.fields.ExtendableTextField; import com.intellij.ui.popup.PopupUpdateProcessor; import com.intellij.ui.scale.JBUIScale; import com.intellij.usageView.UsageInfo; import com.intellij.usages.*; import com.intellij.usages.impl.UsageViewManagerImpl; import com.intellij.util.ArrayUtil; import com.intellij.util.Consumer; import com.intellij.util.Processor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.diff.Diff; import com.intellij.util.diff.FilesTooBigForDiffException; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.text.MatcherHolder; import com.intellij.util.ui.EmptyIcon; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import javax.swing.border.Border; import javax.swing.event.DocumentEvent; import java.awt.*; import java.awt.event.*; import java.util.List; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Future; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; /** * @author Konstantin Bulenkov * @author Mikhail.Sokolov */ public final class 
SearchEverywhereUI extends BigPopupUI implements DataProvider, QuickSearchComponent { private static final Logger LOG = Logger.getInstance(SearchEverywhereUI.class); public static final String SEARCH_EVERYWHERE_SEARCH_FILED_KEY = "search-everywhere-textfield"; //only for testing purposes public static final int SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT = 30; public static final int MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT = 15; public static final int THROTTLING_TIMEOUT = 100; private static final SimpleTextAttributes SMALL_LABEL_ATTRS = new SimpleTextAttributes( SimpleTextAttributes.STYLE_SMALLER, JBUI.CurrentTheme.BigPopup.listTitleLabelForeground()); private final List<? extends SearchEverywhereContributor<?>> myShownContributors; private SearchListModel myListModel; private SETab mySelectedTab; private final List<SETab> myTabs = new ArrayList<>(); private final Function<String, String> myShortcutSupplier; private boolean myEverywhereAutoSet = true; private String myNotFoundString; private JBPopup myHint; private final SESearcher mySearcher; private final ThrottlingListenerWrapper myBufferedListener; private ProgressIndicator mySearchProgressIndicator; private final SEListSelectionTracker mySelectionTracker; private final PersistentSearchEverywhereContributorFilter<String> myContributorsFilter; private ActionToolbar myToolbar; public SearchEverywhereUI(@NotNull Project project, @NotNull List<? extends SearchEverywhereContributor<?>> contributors) { this(project, contributors, s -> null); } public SearchEverywhereUI(@NotNull Project project, @NotNull List<? 
extends SearchEverywhereContributor<?>> contributors, @NotNull Function<String, String> shortcutSupplier) { super(project); List<SEResultsEqualityProvider> equalityProviders = SEResultsEqualityProvider.getProviders(); myBufferedListener = new ThrottlingListenerWrapper(THROTTLING_TIMEOUT, mySearchListener, Runnable::run); mySearcher = new MultiThreadSearcher(myBufferedListener, run -> ApplicationManager.getApplication().invokeLater(run), equalityProviders); myShownContributors = contributors; myShortcutSupplier = shortcutSupplier; Map<String, String> namesMap = ContainerUtil.map2Map(contributors, c -> Pair.create(c.getSearchProviderId(), c.getFullGroupName())); myContributorsFilter = new PersistentSearchEverywhereContributorFilter<>( ContainerUtil.map(contributors, c -> c.getSearchProviderId()), SearchEverywhereConfiguration.getInstance(project), namesMap::get, c -> null); init(); initSearchActions(); myResultsList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION); myResultsList.addListSelectionListener(e -> { int[] selectedIndices = myResultsList.getSelectedIndices(); if (selectedIndices.length > 1) { boolean multiSelection = Arrays.stream(selectedIndices) .allMatch(i -> myListModel.getContributorForIndex(i).isMultiSelectionSupported()); if (!multiSelection) { int index = myResultsList.getLeadSelectionIndex(); myResultsList.setSelectedIndex(index); } } }); mySelectionTracker = new SEListSelectionTracker(myResultsList, myListModel); myResultsList.addListSelectionListener(mySelectionTracker); Disposer.register(this, SearchFieldStatisticsCollector.createAndStart(mySearchField, myProject)); } @Override @NotNull protected CompositeCellRenderer createCellRenderer() { return new CompositeCellRenderer(); } @NotNull @Override public JBList<Object> createList() { myListModel = new SearchListModel(); addListDataListener(myListModel); return new JBList<>(myListModel); } public void toggleEverywhereFilter() { myEverywhereAutoSet = false; if 
(mySelectedTab.everywhereAction == null) return; if (!mySelectedTab.everywhereAction.canToggleEverywhere()) return; mySelectedTab.everywhereAction.setEverywhere( !mySelectedTab.everywhereAction.isEverywhere()); myToolbar.updateActionsImmediately(); } private void setEverywhereAuto(boolean everywhere) { myEverywhereAutoSet = true; if (mySelectedTab.everywhereAction == null) return; if (!mySelectedTab.everywhereAction.canToggleEverywhere()) return; mySelectedTab.everywhereAction.setEverywhere(everywhere); myToolbar.updateActionsImmediately(); } private boolean isEverywhere() { if (mySelectedTab.everywhereAction == null) return true; return mySelectedTab.everywhereAction.isEverywhere(); } private boolean canToggleEverywhere() { if (mySelectedTab.everywhereAction == null) return false; return mySelectedTab.everywhereAction.canToggleEverywhere(); } public void switchToContributor(@NotNull String contributorID) { SETab selectedTab = myTabs.stream() .filter(tab -> tab.getID().equals(contributorID)) .findAny() .orElseThrow(() -> new IllegalArgumentException(String.format("Contributor %s is not supported", contributorID))); switchToTab(selectedTab); } private void switchToNextTab() { int currentIndex = myTabs.indexOf(mySelectedTab); SETab nextTab = currentIndex == myTabs.size() - 1 ? myTabs.get(0) : myTabs.get(currentIndex + 1); switchToTab(nextTab); } private void switchToPrevTab() { int currentIndex = myTabs.indexOf(mySelectedTab); SETab prevTab = currentIndex == 0 ? 
myTabs.get(myTabs.size() - 1) : myTabs.get(currentIndex - 1); switchToTab(prevTab); } private void switchToTab(SETab tab) { boolean prevTabIsAll = mySelectedTab != null && isAllTabSelected(); mySelectedTab = tab; boolean nextTabIsAll = isAllTabSelected(); if (myEverywhereAutoSet && isEverywhere() && canToggleEverywhere()) { setEverywhereAuto(false); } updateSearchFieldAdvertisement(); if (prevTabIsAll != nextTabIsAll) { //reset cell renderer to show/hide group titles in "All" tab myResultsList.setCellRenderer(myResultsList.getCellRenderer()); } if (myToolbar != null) { myToolbar.updateActionsImmediately(); } repaint(); rebuildList(); } private final JLabel myAdvertisementLabel = new JBLabel(); { myAdvertisementLabel.setForeground(JBUI.CurrentTheme.BigPopup.searchFieldGrayForeground()); myAdvertisementLabel.setFont(RelativeFont.SMALL.derive(getFont())); } private void updateSearchFieldAdvertisement() { if (mySearchField == null) return; Boolean commandsSupported = mySelectedTab.getContributor() .map(contributor -> !contributor.getSupportedCommands().isEmpty()) .orElse(true); String advertisementText; if (commandsSupported) { advertisementText = IdeBundle.message("searcheverywhere.textfield.hint", SearchTopHitProvider.getTopHitAccelerator()); } else { advertisementText = mySelectedTab.getContributor().map(c -> c.getAdvertisement()).orElse(null); } mySearchField.remove(myAdvertisementLabel); if (advertisementText != null) { myAdvertisementLabel.setText(advertisementText); mySearchField.add(myAdvertisementLabel, BorderLayout.EAST); } } public String getSelectedContributorID() { return mySelectedTab.getID(); } @Nullable public Object getSelectionIdentity() { Object value = myResultsList.getSelectedValue(); return value == null ? 
null : Objects.hashCode(value); } @Override public void dispose() { stopSearching(); myListModel.clear(); } @Nullable @Override public Object getData(@NotNull String dataId) { IntStream indicesStream = Arrays.stream(myResultsList.getSelectedIndices()) .filter(i -> !myListModel.isMoreElement(i)); //common data section--------------------- if (PlatformDataKeys.PREDEFINED_TEXT.is(dataId)) { return getSearchPattern(); } if (CommonDataKeys.PROJECT.is(dataId)) { return myProject; } if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) { List<PsiElement> elements = indicesStream.mapToObj(i -> { SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(i); Object item = myListModel.getElementAt(i); Object psi = contributor.getDataForItem(item, CommonDataKeys.PSI_ELEMENT.getName()); return (PsiElement)psi; }) .filter(Objects::nonNull) .collect(Collectors.toList()); return PsiUtilCore.toPsiElementArray(elements); } //item-specific data section-------------- return indicesStream.mapToObj(i -> { SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(i); Object item = myListModel.getElementAt(i); return contributor.getDataForItem(item, dataId); }) .filter(Objects::nonNull) .findFirst() .orElse(null); } @Override public void registerHint(@NotNull JBPopup h) { if (myHint != null && myHint.isVisible() && myHint != h) { myHint.cancel(); } myHint = h; } @Override public void unregisterHint() { myHint = null; } private void hideHint() { if (myHint != null && myHint.isVisible()) { myHint.cancel(); } } private void updateHint(Object element) { if (myHint == null || !myHint.isVisible()) return; final PopupUpdateProcessor updateProcessor = myHint.getUserData(PopupUpdateProcessor.class); if (updateProcessor != null) { updateProcessor.updatePopup(element); } } private boolean isAllTabSelected() { return SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID.equals(getSelectedContributorID()); } @Override @NotNull protected JPanel 
createSettingsPanel() { DefaultActionGroup actionGroup = new DefaultActionGroup(); actionGroup.addAction(new ActionGroup() { @Override public AnAction @NotNull [] getChildren(@Nullable AnActionEvent e) { if (e == null || mySelectedTab == null) return EMPTY_ARRAY; return mySelectedTab.actions.toArray(EMPTY_ARRAY); } }); actionGroup.addAction(new ShowInFindToolWindowAction()); myToolbar = ActionManager.getInstance().createActionToolbar("search.everywhere.toolbar", actionGroup, true); myToolbar.setLayoutPolicy(ActionToolbar.NOWRAP_LAYOUT_POLICY); myToolbar.updateActionsImmediately(); JComponent toolbarComponent = myToolbar.getComponent(); toolbarComponent.setOpaque(false); toolbarComponent.setBorder(JBUI.Borders.empty(2, 18, 2, 9)); return (JPanel)toolbarComponent; } @NotNull @Override protected String getInitialHint() { return IdeBundle.message("searcheverywhere.history.shortcuts.hint", KeymapUtil.getKeystrokeText(SearchTextField.ALT_SHOW_HISTORY_KEYSTROKE), KeymapUtil.getKeystrokeText(SearchTextField.SHOW_HISTORY_KEYSTROKE)); } @Override protected @NotNull String getAccessibleName() { return IdeBundle.message("searcheverywhere.accessible.name"); } @NotNull @Override protected ExtendableTextField createSearchField() { SearchField res = new SearchField() { @NotNull @Override protected Extension getLeftExtension() { return new Extension() { @Override public Icon getIcon(boolean hovered) { return AllIcons.Actions.Search; } @Override public boolean isIconBeforeText() { return true; } @Override public int getIconGap() { return JBUIScale.scale(10); } }; } }; res.putClientProperty(SEARCH_EVERYWHERE_SEARCH_FILED_KEY, true); res.setLayout(new BorderLayout()); return res; } @Override protected void installScrollingActions() { ScrollingUtil.installMoveUpAction(myResultsList, getSearchField()); ScrollingUtil.installMoveDownAction(myResultsList, getSearchField()); } @Override @NotNull protected JPanel createTopLeftPanel() { JPanel contributorsPanel = new JPanel(new 
FlowLayout(FlowLayout.LEFT, 0, 0)); contributorsPanel.setOpaque(false); SETab allTab = new SETab(null); contributorsPanel.add(allTab); myTabs.add(allTab); myShownContributors.stream() .filter(SearchEverywhereContributor::isShownInSeparateTab) .forEach(contributor -> { SETab tab = new SETab(contributor); contributorsPanel.add(tab); myTabs.add(tab); }); return contributorsPanel; } private class SETab extends JLabel { final SearchEverywhereContributor<?> contributor; final List<AnAction> actions; final SearchEverywhereToggleAction everywhereAction; SETab(@Nullable SearchEverywhereContributor<?> contributor) { super(contributor == null ? IdeBundle.message("searcheverywhere.allelements.tab.name") : contributor.getGroupName()); this.contributor = contributor; updateTooltip(); Runnable onChanged = () -> { myToolbar.updateActionsImmediately(); rebuildList(); }; if (contributor == null) { String actionText = IdeUICustomization.getInstance().projectMessage("checkbox.include.non.project.items"); actions = Arrays.asList(new CheckBoxSearchEverywhereToggleAction(actionText) { final SearchEverywhereManagerImpl seManager = (SearchEverywhereManagerImpl)SearchEverywhereManager.getInstance(myProject); @Override public boolean isEverywhere() { return seManager.isEverywhere(); } @Override public void setEverywhere(boolean state) { seManager.setEverywhere(state); myTabs.stream() .filter(tab -> tab != SETab.this) .forEach(tab -> tab.everywhereAction.setEverywhere(state)); onChanged.run(); } }, new FiltersAction(myContributorsFilter, onChanged)); } else { actions = new ArrayList<>(contributor.getActions(onChanged)); } everywhereAction = (SearchEverywhereToggleAction)ContainerUtil.find(actions, o -> o instanceof SearchEverywhereToggleAction); Insets insets = JBUI.CurrentTheme.BigPopup.tabInsets(); setBorder(JBUI.Borders.empty(insets.top, insets.left, insets.bottom, insets.right)); addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { 
switchToTab(SETab.this); String reportableID = getContributor() .map(SearchEverywhereUsageTriggerCollector::getReportableContributorID) .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID); FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(reportableID) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.TAB_SWITCHED, data); } }); } private void updateTooltip() { String shortcut = myShortcutSupplier.apply(getID()); if (shortcut != null) { setToolTipText(shortcut); } } public String getID() { return getContributor() .map(SearchEverywhereContributor::getSearchProviderId) .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID); } public Optional<SearchEverywhereContributor<?>> getContributor() { return Optional.ofNullable(contributor); } @Override public Dimension getPreferredSize() { Dimension size = super.getPreferredSize(); size.height = JBUIScale.scale(29); return size; } @Override public boolean isOpaque() { return mySelectedTab == this; } @Override public Color getBackground() { return mySelectedTab == this ? JBUI.CurrentTheme.BigPopup.selectedTabColor() : super.getBackground(); } @Override public Color getForeground() { return mySelectedTab == this ? JBUI.CurrentTheme.BigPopup.selectedTabTextColor() : super.getForeground(); } } private void rebuildList() { ApplicationManager.getApplication().assertIsDispatchThread(); stopSearching(); myResultsList.setEmptyText(IdeBundle.message("label.choosebyname.searching")); String rawPattern = getSearchPattern(); updateViewType(rawPattern.isEmpty() ? 
ViewType.SHORT : ViewType.FULL); String namePattern = mySelectedTab.getContributor() .map(contributor -> contributor.filterControlSymbols(rawPattern)) .orElse(rawPattern); MinusculeMatcher matcher = NameUtil.buildMatcherWithFallback("*" + rawPattern, "*" + namePattern, NameUtil.MatchingCaseSensitivity.NONE); MatcherHolder.associateMatcher(myResultsList, matcher); Map<SearchEverywhereContributor<?>, Integer> contributorsMap = new HashMap<>(); Optional<SearchEverywhereContributor<?>> selectedContributor = mySelectedTab.getContributor(); if (selectedContributor.isPresent()) { contributorsMap.put(selectedContributor.get(), SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT); } else { contributorsMap.putAll(getAllTabContributors().stream().collect(Collectors.toMap(c -> c, c -> MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT))); } List<SearchEverywhereContributor<?>> contributors = DumbService.getInstance(myProject).filterByDumbAwareness(contributorsMap.keySet()); if (contributors.isEmpty() && DumbService.isDumb(myProject)) { myResultsList.setEmptyText(IdeBundle.message("searcheverywhere.indexing.mode.not.supported", mySelectedTab.getText(), ApplicationNamesInfo.getInstance().getFullProductName())); myListModel.clear(); return; } if (contributors.size() != contributorsMap.size()) { myResultsList.setEmptyText(IdeBundle.message("searcheverywhere.indexing.incomplete.results", mySelectedTab.getText(), ApplicationNamesInfo.getInstance().getFullProductName())); } myListModel.expireResults(); contributors.forEach(contributor -> myListModel.setHasMore(contributor, false)); String commandPrefix = SearchTopHitProvider.getTopHitAccelerator(); if (rawPattern.startsWith(commandPrefix)) { String typedCommand = rawPattern.split(" ")[0].substring(commandPrefix.length()); List<SearchEverywhereCommandInfo> commands = getCommandsForCompletion(contributors, typedCommand); if (!commands.isEmpty()) { if (rawPattern.contains(" ")) { contributorsMap.keySet().retainAll(commands.stream() 
.map(SearchEverywhereCommandInfo::getContributor) .collect(Collectors.toSet())); } else { myListModel.clear(); List<SearchEverywhereFoundElementInfo> lst = ContainerUtil.map( commands, command -> new SearchEverywhereFoundElementInfo(command, 0, myStubCommandContributor)); myListModel.addElements(lst); ScrollingUtil.ensureSelectionExists(myResultsList); } } } mySearchProgressIndicator = mySearcher.search(contributorsMap, rawPattern); } private void initSearchActions() { MouseAdapter listMouseListener = new MouseAdapter() { private int currentDescriptionIndex = -1; @Override public void mouseClicked(MouseEvent e) { onMouseClicked(e); } @Override public void mouseMoved(MouseEvent e) { int index = myResultsList.locationToIndex(e.getPoint()); indexChanged(index); } @Override public void mouseExited(MouseEvent e) { int index = myResultsList.getSelectedIndex(); indexChanged(index); } private void indexChanged(int index) { if (index != currentDescriptionIndex) { currentDescriptionIndex = index; showDescriptionForIndex(index); } } }; myResultsList.addMouseMotionListener(listMouseListener); myResultsList.addMouseListener(listMouseListener); ScrollingUtil.redirectExpandSelection(myResultsList, mySearchField); Consumer<AnActionEvent> nextTabAction = e -> { switchToNextTab(); triggerTabSwitched(e); }; Consumer<AnActionEvent> prevTabAction = e -> { switchToPrevTab(); triggerTabSwitched(e); }; registerAction(SearchEverywhereActions.AUTOCOMPLETE_COMMAND, CompleteCommandAction::new); registerAction(SearchEverywhereActions.SWITCH_TO_NEXT_TAB, nextTabAction); registerAction(SearchEverywhereActions.SWITCH_TO_PREV_TAB, prevTabAction); registerAction(IdeActions.ACTION_NEXT_TAB, nextTabAction); registerAction(IdeActions.ACTION_PREVIOUS_TAB, prevTabAction); registerAction(IdeActions.ACTION_SWITCHER, e -> { if (e.getInputEvent().isShiftDown()) { switchToPrevTab(); } else { switchToNextTab(); } triggerTabSwitched(e); }); registerAction(SearchEverywhereActions.NAVIGATE_TO_NEXT_GROUP, e -> { 
fetchGroups(true); FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(null) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.GROUP_NAVIGATE, data); }); registerAction(SearchEverywhereActions.NAVIGATE_TO_PREV_GROUP, e -> { fetchGroups(false); FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(null) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.GROUP_NAVIGATE, data); }); registerSelectItemAction(); AnAction escape = ActionManager.getInstance().getAction("EditorEscape"); DumbAwareAction.create(__ -> closePopup()) .registerCustomShortcutSet(escape == null ? CommonShortcuts.ESCAPE : escape.getShortcutSet(), this); mySearchField.getDocument().addDocumentListener(new DocumentAdapter() { @Override protected void textChanged(@NotNull DocumentEvent e) { String newSearchString = getSearchPattern(); if (myNotFoundString != null) { boolean newPatternContainsPrevious = myNotFoundString.length() > 1 && newSearchString.contains(myNotFoundString); if (myEverywhereAutoSet && isEverywhere() && canToggleEverywhere() && !newPatternContainsPrevious) { myNotFoundString = null; setEverywhereAuto(false); return; } } rebuildList(); } }); myResultsList.addListSelectionListener(e -> { Object selectedValue = myResultsList.getSelectedValue(); if (selectedValue != null && myHint != null && myHint.isVisible()) { updateHint(selectedValue); } showDescriptionForIndex(myResultsList.getSelectedIndex()); }); MessageBusConnection projectBusConnection = myProject.getMessageBus().connect(this); projectBusConnection.subscribe(DumbService.DUMB_MODE, new DumbService.DumbModeListener() { @Override public void exitDumbMode() { ApplicationManager.getApplication().invokeLater(() -> { updateSearchFieldAdvertisement(); rebuildList(); }); } }); projectBusConnection.subscribe(AnActionListener.TOPIC, new AnActionListener() { @Override public void afterActionPerformed(@NotNull AnAction action, @NotNull DataContext 
dataContext, @NotNull AnActionEvent event) { if (action == mySelectedTab.everywhereAction && event.getInputEvent() != null) { myEverywhereAutoSet = false; } } }); ApplicationManager.getApplication() .getMessageBus() .connect(this) .subscribe(ProgressWindow.TOPIC, pw -> Disposer.register(pw, () -> myResultsList.repaint())); mySearchField.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent e) { Component oppositeComponent = e.getOppositeComponent(); if (!isHintComponent(oppositeComponent) && !UIUtil.haveCommonOwner(SearchEverywhereUI.this, oppositeComponent)) { closePopup(); } } }); } private void showDescriptionForIndex(int index) { if (index >= 0 && !myListModel.isMoreElement(index)) { SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(index); Object data = contributor.getDataForItem( myListModel.getElementAt(index), SearchEverywhereDataKeys.ITEM_STRING_DESCRIPTION.getName()); if (data instanceof String) { ActionMenu.showDescriptionInStatusBar(true, myResultsList, (String)data); } } } private void registerAction(String actionID, Supplier<? extends AnAction> actionSupplier) { Optional.ofNullable(ActionManager.getInstance().getAction(actionID)) .map(a -> a.getShortcutSet()) .ifPresent(shortcuts -> actionSupplier.get().registerCustomShortcutSet(shortcuts, this, this)); } private void registerAction(String actionID, Consumer<? super AnActionEvent> action) { registerAction(actionID, () -> DumbAwareAction.create(action)); } // when user adds shortcut for "select item" we should add shortcuts // with all possible modifiers (Ctrl, Shift, Alt, etc.) 
/**
 * Registers the "select item" action. Each configured keyboard shortcut is
 * duplicated with every allowed modifier (Shift/Ctrl/Meta/Alt) added, so that
 * modifier-decorated presses (e.g. multi-select) still trigger selection.
 */
private void registerSelectItemAction() {
  int[] allowedModifiers = new int[]{
    0,
    InputEvent.SHIFT_MASK,
    InputEvent.CTRL_MASK,
    InputEvent.META_MASK,
    InputEvent.ALT_MASK
  };

  ShortcutSet selectShortcuts = ActionManager.getInstance().getAction(SearchEverywhereActions.SELECT_ITEM).getShortcutSet();
  // Only keyboard shortcuts can be combined with extra modifiers.
  Collection<KeyboardShortcut> keyboardShortcuts = Arrays.stream(selectShortcuts.getShortcuts())
    .filter(shortcut -> shortcut instanceof KeyboardShortcut)
    .map(shortcut -> (KeyboardShortcut)shortcut)
    .collect(Collectors.toList());

  for (int modifiers : allowedModifiers) {
    Collection<Shortcut> newShortcuts = new ArrayList<>();
    for (KeyboardShortcut shortcut : keyboardShortcuts) {
      boolean hasSecondStroke = shortcut.getSecondKeyStroke() != null;
      // For two-stroke shortcuts the modifier is added to the second stroke.
      KeyStroke originalStroke = hasSecondStroke ? shortcut.getSecondKeyStroke() : shortcut.getFirstKeyStroke();

      // Skip strokes that already contain this modifier.
      if ((originalStroke.getModifiers() & modifiers) != 0) continue;

      KeyStroke newStroke = KeyStroke.getKeyStroke(originalStroke.getKeyCode(), originalStroke.getModifiers() | modifiers);
      newShortcuts.add(hasSecondStroke
                       ? new KeyboardShortcut(shortcut.getFirstKeyStroke(), newStroke)
                       : new KeyboardShortcut(newStroke, null));
    }
    if (newShortcuts.isEmpty()) continue;

    ShortcutSet newShortcutSet = new CustomShortcutSet(newShortcuts.toArray(Shortcut.EMPTY_ARRAY));
    // The pressed modifiers are forwarded so contributors can react to them.
    DumbAwareAction.create(event -> {
      int[] indices = myResultsList.getSelectedIndices();
      elementsSelected(indices, modifiers);
    }).registerCustomShortcutSet(newShortcutSet, this, this);
  }
}

/** Logs a TAB_SWITCHED statistics event carrying the selected tab's contributor ID. */
private void triggerTabSwitched(AnActionEvent e) {
  String id = mySelectedTab.getContributor()
    .map(SearchEverywhereUsageTriggerCollector::getReportableContributorID)
    .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID);
  FeatureUsageData data = SearchEverywhereUsageTriggerCollector
    .createData(id)
    .addInputEvent(e);
  featureTriggered(SearchEverywhereUsageTriggerCollector.TAB_SWITCHED, data);
}

/**
 * Moves the selection to the nearest group-first item or "more" row below
 * ({@code down == true}) or above the current selection, scrolling it into view.
 */
private void fetchGroups(boolean down) {
  int index = myResultsList.getSelectedIndex();
  do {
    index += down ? 1 : -1;
  }
  while (index >= 0 && index < myListModel.getSize()
         && !myListModel.isGroupFirstItem(index)
         && !myListModel.isMoreElement(index));
  if (index >= 0 && index < myListModel.getSize()) {
    myResultsList.setSelectedIndex(index);
    ScrollingUtil.ensureIndexIsVisible(myResultsList, index, 0);
  }
}

/**
 * Returns the command currently selected in the list, provided it belongs to
 * the stub command contributor and its name contains the typed command text.
 */
private Optional<SearchEverywhereCommandInfo> getSelectedCommand(String typedCommand) {
  int index = myResultsList.getSelectedIndex();
  if (index < 0) return Optional.empty();

  SearchEverywhereContributor contributor = myListModel.getContributorForIndex(index);
  if (contributor != myStubCommandContributor) return Optional.empty();

  SearchEverywhereCommandInfo selectedCommand = (SearchEverywhereCommandInfo)myListModel.getElementAt(index);
  return selectedCommand.getCommand().contains(typedCommand) ? Optional.of(selectedCommand) : Optional.empty();
}

// Collects all contributors' commands containing the typed part; exact-prefix
// matches are ordered first, ties sorted case-insensitively by command name.
// (The parameter list continues on the next source line.)
@NotNull
private static List<SearchEverywhereCommandInfo> getCommandsForCompletion(Collection<?
extends SearchEverywhereContributor<?>> contributors, String enteredCommandPart) {
  // Commands starting with the typed part win over mere substring matches;
  // remaining ties are ordered case-insensitively by name.
  Comparator<SearchEverywhereCommandInfo> cmdComparator = (cmd1, cmd2) -> {
    String cmdName1 = cmd1.getCommand();
    String cmdName2 = cmd2.getCommand();
    if (!enteredCommandPart.isEmpty()) {
      if (cmdName1.startsWith(enteredCommandPart) && !cmdName2.startsWith(enteredCommandPart)) return -1;
      if (!cmdName1.startsWith(enteredCommandPart) && cmdName2.startsWith(enteredCommandPart)) return 1;
    }

    // NOTE(review): cmd2.getCommand() is the same value as the unused-here local
    // cmdName2 — using the local would be more consistent (behavior identical).
    return String.CASE_INSENSITIVE_ORDER.compare(cmdName1, cmd2.getCommand());
  };

  return contributors.stream()
    .flatMap(contributor -> contributor.getSupportedCommands().stream())
    .filter(command -> command.getCommand().contains(enteredCommandPart))
    .sorted(cmdComparator)
    .collect(Collectors.toList());
}

/**
 * Treats a plain left click as selection of the clicked row. Shift/Ctrl clicks
 * are left to the list's default multi-select handling.
 */
private void onMouseClicked(@NotNull MouseEvent e) {
  boolean multiSelectMode = e.isShiftDown() || UIUtil.isControlKeyDown(e);
  if (e.getButton() == MouseEvent.BUTTON1 && !multiSelectMode) {
    e.consume();
    final int i = myResultsList.locationToIndex(e.getPoint());
    if (i > -1) {
      myResultsList.setSelectedIndex(i);
      elementsSelected(new int[]{i}, e.getModifiers());
    }
  }
}

/** Returns whether {@code component} lives inside the currently shown hint popup. */
private boolean isHintComponent(Component component) {
  if (myHint != null && !myHint.isDisposed() && component != null) {
    return SwingUtilities.isDescendingFrom(component, myHint.getContent());
  }
  return false;
}

/**
 * Processes activation of the given row indexes: a lone "more" row expands its
 * contributor's results; otherwise each selected element is handed to its
 * contributor (with usage statistics logged) and the popup closes if any
 * contributor requests it, else the list is repainted.
 */
private void elementsSelected(int[] indexes, int modifiers) {
  if (indexes.length == 1 && myListModel.isMoreElement(indexes[0])) {
    SearchEverywhereContributor contributor = myListModel.getContributorForIndex(indexes[0]);
    showMoreElements(contributor);
    return;
  }

  // "More" rows are never activated as part of a multi-selection.
  indexes = Arrays.stream(indexes)
    .filter(i -> !myListModel.isMoreElement(i))
    .toArray();

  String searchText = getSearchPattern();
  // A pattern of the form "<accelerator><command> <args>" counts as command usage.
  if (searchText.startsWith(SearchTopHitProvider.getTopHitAccelerator()) && searchText.contains(" ")) {
    featureTriggered(SearchEverywhereUsageTriggerCollector.COMMAND_USED, null);
  }

  boolean closePopup = false;
  for (int i : indexes) {
    SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(i);
    Object value = myListModel.getElementAt(i);

    String selectedTabContributorID = mySelectedTab.getContributor()
      .map(SearchEverywhereUsageTriggerCollector::getReportableContributorID)
      .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID);
    String reportableContributorID = SearchEverywhereUsageTriggerCollector.getReportableContributorID(contributor);
    FeatureUsageData data = SearchEverywhereUsageTriggerCollector.createData(reportableContributorID, selectedTabContributorID, i);
    if (value instanceof PsiElement) {
      data.addLanguage(((PsiElement) value).getLanguage());
    }
    featureTriggered(SearchEverywhereUsageTriggerCollector.CONTRIBUTOR_ITEM_SELECTED, data);

    // Any contributor may request closing the popup after processing its item.
    closePopup |= contributor.processSelectedItem(value, modifiers, searchText);
  }

  if (closePopup) {
    closePopup();
  }
  else {
    ApplicationManager.getApplication().invokeLater(() -> myResultsList.repaint());
  }
}

/**
 * Asks the searcher for additional results of {@code contributor}, raising that
 * contributor's element limit by the single- or multi-contributor increment.
 * (The ternary's branches continue on the next source line.)
 */
private void showMoreElements(SearchEverywhereContributor contributor) {
  featureTriggered(SearchEverywhereUsageTriggerCollector.MORE_ITEM_SELECTED, null);
  Map<SearchEverywhereContributor<?>, Collection<SearchEverywhereFoundElementInfo>> found = myListModel.getFoundElementsMap();
  int limit = myListModel.getItemsForContributor(contributor)
              + (mySelectedTab.getContributor().isPresent() ?
SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT : MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT);
  mySearchProgressIndicator = mySearcher.findMoreItems(found, getSearchPattern(), contributor, limit);
}

/** Cancels the running search (if any) and flushes the buffered result listener. */
private void stopSearching() {
  if (mySearchProgressIndicator != null && !mySearchProgressIndicator.isCanceled()) {
    mySearchProgressIndicator.cancel();
  }
  if (myBufferedListener != null) {
    myBufferedListener.clearBuffer();
  }
}

/** Clears the status-bar description, stops the search and runs the close handler. */
private void closePopup() {
  ActionMenu.showDescriptionInStatusBar(true, myResultsList, null);
  stopSearching();
  searchFinishedHandler.run();
}

/** Contributors shown in the "All" tab, i.e. those enabled in the contributors filter. */
@NotNull
private List<SearchEverywhereContributor<?>> getAllTabContributors() {
  return ContainerUtil.filter(myShownContributors,
                              contributor -> myContributorsFilter.isSelected(contributor.getSearchProviderId()));
}

/** Contributors participating in the current tab: the filtered set, or the tab's single contributor. */
@NotNull
private Collection<SearchEverywhereContributor<?>> getContributorsForCurrentTab() {
  return isAllTabSelected() ? getAllTabContributors() : Collections.singleton(mySelectedTab.getContributor().get());
}

/**
 * Test helper: types {@code pattern} into the search field and returns a future
 * completed with the result elements once the search finishes.
 */
@TestOnly
public Future<List<Object>> findElementsForPattern(String pattern) {
  CompletableFuture<List<Object>> future = new CompletableFuture<>();
  mySearchListener.setTestCallback(list -> {
    future.complete(list);
    mySearchListener.setTestCallback(null); // one-shot callback
  });
  mySearchField.setText(pattern);
  return future;
}

/**
 * Renderer delegating to the selected element's contributor (or the classifier
 * extension point when it takes over), decorating the first item of each group
 * with a group title and rendering the special "more" row via the dedicated renderer.
 */
private class CompositeCellRenderer implements ListCellRenderer<Object> {

  @Override
  public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
    if (value == SearchListModel.MORE_ELEMENT) {
      Component component = myMoreRenderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
      component.setPreferredSize(UIUtil.updateListRowHeight(component.getPreferredSize()));
      return component;
    }

    SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(index);
    // The classifier extension point gets the first chance to render the row.
    Component component = SearchEverywhereClassifier.EP_Manager.getListCellRendererComponent(
      list, value, index, isSelected, cellHasFocus);
    if (component == null) {
      component = contributor.getElementsRenderer().getListCellRendererComponent(
        list, value, index, isSelected, true);
    }

    if (component instanceof JComponent) {
      Border border = ((JComponent)component).getBorder();
      // The goto-action toggle-button border is intentional — keep it.
      if (border != GotoActionModel.GotoActionListCellRenderer.TOGGLE_BUTTON_BORDER) {
        ((JComponent)component).setBorder(JBUI.Borders.empty(1, 2));
      }
    }
    AppUIUtil.targetToDevice(component, list);
    component.setPreferredSize(UIUtil.updateListRowHeight(component.getPreferredSize()));
    // Group titles are only shown in the "All" tab, on the first item of each group.
    if (isAllTabSelected() && myListModel.isGroupFirstItem(index)) {
      component = myGroupTitleRenderer.withDisplayedData(contributor.getFullGroupName(), component);
    }
    return component;
  }
}

// Renders a command-completion row: the command (with prefix) in the list
// foreground followed by its gray description.
private final ListCellRenderer<Object> myCommandRenderer = new ColoredListCellRenderer<Object>() {

  @Override
  protected void customizeCellRenderer(@NotNull JList<?> list, Object value, int index, boolean selected, boolean hasFocus) {
    setPaintFocusBorder(false);
    setIcon(EmptyIcon.ICON_16);
    setFont(list.getFont());

    SearchEverywhereCommandInfo command = (SearchEverywhereCommandInfo)value;
    append(command.getCommandWithPrefix() + " ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, list.getForeground()));
    append(command.getDefinition(), new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, JBColor.GRAY));
    setBackground(UIUtil.getListBackground(selected));
  }
};

// Renders the "... more" row in a small label font; accepts only MORE_ELEMENT.
private final ListCellRenderer<Object> myMoreRenderer = new ColoredListCellRenderer<Object>() {

  @Override
  protected int getMinHeight() {
    return -1;
  }

  @Override
  protected void customizeCellRenderer(@NotNull JList<?> list, Object value, int index, boolean selected, boolean hasFocus) {
    if (value != SearchListModel.MORE_ELEMENT) {
      throw new AssertionError(value);
    }
    setFont(UIUtil.getLabelFont().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.SMALL)));
    append(IdeBundle.message("search.everywhere.points.more"), SMALL_LABEL_ATTRS);
    setIpad(JBInsets.create(1, 7));
    setMyBorder(null);
  }
};

// Single shared instance wrapping an item component with its group-title header.
private final GroupTitleRenderer myGroupTitleRenderer =
new GroupTitleRenderer();

/**
 * Panel placing a group title (with a trailing separator line) above an
 * arbitrary item component. The single instance is reused across rows.
 */
private static class GroupTitleRenderer extends CellRendererPanel {

  final SimpleColoredComponent titleLabel = new SimpleColoredComponent();

  GroupTitleRenderer() {
    setLayout(new BorderLayout());
    SeparatorComponent separatorComponent = new SeparatorComponent(
      titleLabel.getPreferredSize().height / 2, JBUI.CurrentTheme.BigPopup.listSeparatorColor(), null);

    JPanel topPanel = JBUI.Panels.simplePanel(5, 0)
      .addToCenter(separatorComponent)
      .addToLeft(titleLabel)
      .withBorder(JBUI.Borders.empty(1, 7))
      .withBackground(UIUtil.getListBackground());

    add(topPanel, BorderLayout.NORTH);
  }

  /** Replaces the title and item content; accessibility is delegated to the content. */
  public GroupTitleRenderer withDisplayedData(String title, Component itemContent) {
    titleLabel.clear();
    titleLabel.append(title, SMALL_LABEL_ATTRS);
    Component prevContent = ((BorderLayout)getLayout()).getLayoutComponent(BorderLayout.CENTER);
    if (prevContent != null) {
      remove(prevContent);
    }
    add(itemContent, BorderLayout.CENTER);
    accessibleContext = itemContent.getAccessibleContext();
    return this;
  }
}

/**
 * List model keeping found elements grouped by contributor, each group sorted
 * by priority (descending), optionally followed by a {@link #MORE_ELEMENT}
 * marker when the contributor has further results.
 */
public static class SearchListModel extends AbstractListModel<Object> {

  // Marker object appended after a contributor's items when more results exist.
  static final Object MORE_ELEMENT = new Object();

  private final List<SearchEverywhereFoundElementInfo> listElements = new ArrayList<>();

  // When true, the next addElements() call reconciles against existing items
  // (via diff) instead of appending.
  private boolean resultsExpired = false;

  public boolean isResultsExpired() {
    return resultsExpired;
  }

  public void expireResults() {
    resultsExpired = true;
  }

  @Override
  public int getSize() {
    return listElements.size();
  }

  @Override
  public Object getElementAt(int index) {
    return listElements.get(index).getElement();
  }

  public List<Object> getItems() {
    return new ArrayList<>(values());
  }

  /** Elements currently listed for {@code contributor}, excluding the "more" marker. */
  public Collection<Object> getFoundItems(SearchEverywhereContributor contributor) {
    return listElements.stream()
      .filter(info -> info.getContributor() == contributor && info.getElement() != MORE_ELEMENT)
      .map(info -> info.getElement())
      .collect(Collectors.toList());
  }

  public boolean hasMoreElements(SearchEverywhereContributor contributor) {
    return listElements.stream()
      .anyMatch(info -> info.getElement() == MORE_ELEMENT && info.getContributor() == contributor);
  }

  /**
   * Inserts new search results. In normal mode items are added to each
   * contributor's section and the section re-sorted by priority. In expired
   * mode, sections of absent contributors are dropped and each remaining
   * section is patched via a diff to minimize UI update events.
   */
  public void addElements(List<? extends SearchEverywhereFoundElementInfo> items) {
    if (items.isEmpty()) {
      return;
    }

    // Bucket incoming items by contributor and sort each bucket by priority (desc).
    Map<SearchEverywhereContributor<?>, List<SearchEverywhereFoundElementInfo>> itemsMap = new HashMap<>();
    items.forEach(info -> {
      List<SearchEverywhereFoundElementInfo> list = itemsMap.computeIfAbsent(info.getContributor(), contributor -> new ArrayList<>());
      list.add(info);
    });
    itemsMap.forEach((contributor, list) -> list.sort(Comparator.comparingInt(SearchEverywhereFoundElementInfo::getPriority).reversed()));

    if (resultsExpired) {
      retainContributors(itemsMap.keySet());
      clearMoreItems();

      itemsMap.forEach((contributor, list) -> {
        Object[] oldItems = ArrayUtil.toObjectArray(getFoundItems(contributor));
        Object[] newItems = list.stream()
          .map(SearchEverywhereFoundElementInfo::getElement)
          .toArray();
        try {
          Diff.Change change = Diff.buildChanges(oldItems, newItems);
          applyChange(change, contributor, list);
        }
        catch (FilesTooBigForDiffException e) {
          LOG.error("Cannot calculate diff for updated search results");
        }
      });
      resultsExpired = false;
    }
    else {
      itemsMap.forEach((contributor, list) -> {
        int startIndex = contributors().indexOf(contributor);
        int insertionIndex = getInsertionPoint(contributor);
        int endIndex = insertionIndex + list.size() - 1;
        listElements.addAll(insertionIndex, list);
        fireIntervalAdded(this, insertionIndex, endIndex);

        // there were items for this contributor before update
        if (startIndex >= 0) {
          listElements.subList(startIndex, endIndex + 1)
            .sort(Comparator.comparingInt(SearchEverywhereFoundElementInfo::getPriority).reversed());
          fireContentsChanged(this, startIndex, endIndex);
        }
      });
    }
  }

  /**
   * Removes every element whose contributor is NOT in {@code retainContributors},
   * firing an interval-removed event for each contiguous run of removed rows.
   */
  private void retainContributors(Collection<SearchEverywhereContributor<?>> retainContributors) {
    Iterator<SearchEverywhereFoundElementInfo> iterator = listElements.iterator();
    int startInterval = 0;
    int endInterval = -1;
    while (iterator.hasNext()) {
      SearchEverywhereFoundElementInfo item = iterator.next();
      if (retainContributors.contains(item.getContributor())) {
        // A retained row terminates the pending removed interval (if any).
        if (startInterval <= endInterval) {
          fireIntervalRemoved(this, startInterval, endInterval);
          startInterval = endInterval + 2;
        }
        else {
          startInterval++;
        }
      }
      else {
        iterator.remove();
      }
      endInterval++;
    }

    if (startInterval <= endInterval) {
      fireIntervalRemoved(this, startInterval, endInterval);
    }
  }

  /** Removes every "more" marker, notifying listeners per removed row. */
  private void clearMoreItems() {
    ListIterator<SearchEverywhereFoundElementInfo> iterator = listElements.listIterator();
    while (iterator.hasNext()) {
      int index = iterator.nextIndex();
      if (iterator.next().getElement() == MORE_ELEMENT) {
        iterator.remove();
        fireContentsChanged(this, index, index);
      }
    }
  }

  /**
   * Applies a computed diff to {@code contributor}'s section, translating the
   * diff's row numbers by the section's first index. Changes are applied
   * back-to-front so earlier indices stay valid.
   */
  private void applyChange(Diff.Change change,
                           SearchEverywhereContributor<?> contributor,
                           List<SearchEverywhereFoundElementInfo> newItems) {
    int firstItemIndex = contributors().indexOf(contributor);
    if (firstItemIndex < 0) {
      firstItemIndex = getInsertionPoint(contributor);
    }

    for (Diff.Change ch : toRevertedList(change)) {
      if (ch.deleted > 0) {
        for (int i = ch.deleted - 1; i >= 0; i--) {
          int index = firstItemIndex + ch.line0 + i;
          listElements.remove(index);
        }
        fireIntervalRemoved(this, firstItemIndex + ch.line0, firstItemIndex + ch.line0 + ch.deleted - 1);
      }

      if (ch.inserted > 0) {
        List<SearchEverywhereFoundElementInfo> addedItems = newItems.subList(ch.line1, ch.line1 + ch.inserted);
        listElements.addAll(firstItemIndex + ch.line0, addedItems);
        fireIntervalAdded(this, firstItemIndex + ch.line0, firstItemIndex + ch.line0 + ch.inserted - 1);
      }
    }
  }

  // Reverses the linked Diff.Change list into a plain list.
  private static List<Diff.Change> toRevertedList(Diff.Change change) {
    List<Diff.Change> res = new ArrayList<>();
    while (change != null) {
      res.add(0, change);
      change = change.link;
    }
    return res;
  }

  /** Removes the first occurrence of {@code item} inside {@code contributor}'s section. */
  public void removeElement(@NotNull Object item, SearchEverywhereContributor contributor) {
    int index = contributors().indexOf(contributor);
    if (index < 0) {
      return;
    }

    while (index < listElements.size() && listElements.get(index).getContributor() == contributor) {
      if (item.equals(listElements.get(index).getElement())) {
        listElements.remove(index);
        fireIntervalRemoved(this, index, index);
        return;
      }
      index++;
    }
  }

  /** Adds or removes the trailing "more" marker of {@code contributor}'s section. */
  public void setHasMore(SearchEverywhereContributor<?> contributor, boolean newVal) {
    int index = contributors().lastIndexOf(contributor);
    if (index < 0) {
      return;
    }

    boolean alreadyHas = isMoreElement(index);
    if (alreadyHas && !newVal) {
      listElements.remove(index);
      fireIntervalRemoved(this, index, index);
    }

    if (!alreadyHas && newVal) {
      index += 1;
      listElements.add(index, new SearchEverywhereFoundElementInfo(MORE_ELEMENT, 0, contributor));
      fireIntervalAdded(this, index, index);
    }
  }

  public void clear() {
    int index = listElements.size() - 1;
    listElements.clear();
    if (index >= 0) {
      fireIntervalRemoved(this, 0, index);
    }
  }

  public boolean contains(Object val) {
    return values().contains(val);
  }

  public boolean isMoreElement(int index) {
    return listElements.get(index).getElement() == MORE_ELEMENT;
  }

  public <Item> SearchEverywhereContributor<Item> getContributorForIndex(int index) {
    //noinspection unchecked
    return (SearchEverywhereContributor<Item>)listElements.get(index).getContributor();
  }

  /** A row starts a group when it is the first row or its contributor differs from the previous row's. */
  public boolean isGroupFirstItem(int index) {
    return index == 0 || listElements.get(index).getContributor() != listElements.get(index - 1).getContributor();
  }

  /** Number of rows in {@code contributor}'s section, excluding a trailing "more" marker. */
  public int getItemsForContributor(SearchEverywhereContributor<?> contributor) {
    List<SearchEverywhereContributor> contributorsList = contributors();
    int first = contributorsList.indexOf(contributor);
    int last = contributorsList.lastIndexOf(contributor);
    if (isMoreElement(last)) {
      last -= 1;
    }
    return last - first + 1;
  }

  public Map<SearchEverywhereContributor<?>, Collection<SearchEverywhereFoundElementInfo>> getFoundElementsMap() {
    return listElements.stream()
      .filter(info -> info.element != MORE_ELEMENT)
      .collect(Collectors.groupingBy(o -> o.getContributor(), Collectors.toCollection(ArrayList::new)));
  }

  // Live transformed view: each row's contributor.
  @NotNull
  private List<SearchEverywhereContributor> contributors() {
    return Lists.transform(listElements, info -> info.getContributor());
  }

  // Live transformed view: each row's element.
  @NotNull
  private List<Object> values() {
    return Lists.transform(listElements, info -> info.getElement());
  }

  /**
   * Row index where new items of {@code contributor} should be inserted:
   * just before its "more" marker, after its last row, or — when it has no
   * rows yet — at the position given by binary search on contributor sort weight.
   */
  private int getInsertionPoint(SearchEverywhereContributor contributor) {
    if (listElements.isEmpty()) {
      return 0;
    }

    List<SearchEverywhereContributor> list = contributors();
    int index = list.lastIndexOf(contributor);
    if (index >= 0) {
      return isMoreElement(index) ? index : index + 1;
    }

    index = Collections.binarySearch(list, contributor, Comparator.comparingInt(SearchEverywhereContributor::getSortWeight));
    return -index - 1;
  }
}

/**
 * Action re-running the current query and opening the results in the Find tool
 * window, fetching additional items for contributors that still have more results.
 */
private class ShowInFindToolWindowAction extends DumbAwareAction {

  ShowInFindToolWindowAction() {
    super(IdeBundle.messagePointer("show.in.find.window.button.name"),
          IdeBundle.messagePointer("show.in.find.window.button.description"), AllIcons.General.Pin_tab);
  }

  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    stopSearching();

    // Only contributors that opted in to Find-window presentation participate.
    Collection<SearchEverywhereContributor<?>> contributors = getContributorsForCurrentTab();
    contributors = ContainerUtil.filter(contributors, SearchEverywhereContributor::showInFindResults);

    if (contributors.isEmpty()) {
      return;
    }

    String searchText = getSearchPattern();
    String contributorsString = contributors.stream()
      .map(SearchEverywhereContributor::getGroupName)
      .collect(Collectors.joining(", "));

    UsageViewPresentation presentation = new UsageViewPresentation();
    String tabCaptionText = IdeBundle.message("searcheverywhere.found.matches.title", searchText, contributorsString);
    presentation.setCodeUsagesString(tabCaptionText);
    presentation.setUsagesInGeneratedCodeString(
      IdeBundle.message("searcheverywhere.found.matches.generated.code.title", searchText, contributorsString));
    presentation.setTargetsNodeText(IdeBundle.message("searcheverywhere.found.targets.title", searchText, contributorsString));
    presentation.setTabName(tabCaptionText);
    presentation.setTabText(tabCaptionText);

    Collection<Usage> usages = new LinkedHashSet<>();
Collection<PsiElement> targets = new LinkedHashSet<>();

    // Elements already in the list model are reused without re-searching.
    Collection<Object> cached = contributors.stream()
      .flatMap(contributor -> myListModel.getFoundItems(contributor).stream())
      .collect(Collectors.toSet());
    fillUsages(cached, usages, targets);

    // Contributors showing a "more" row still have results to fetch.
    Collection<SearchEverywhereContributor<?>> contributorsForAdditionalSearch;
    contributorsForAdditionalSearch = ContainerUtil.filter(contributors, contributor -> myListModel.hasMoreElements(contributor));

    closePopup();
    if (!contributorsForAdditionalSearch.isEmpty()) {
      ProgressManager.getInstance().run(new Task.Modal(myProject, tabCaptionText, true) {
        private final ProgressIndicator progressIndicator = new ProgressIndicatorBase();

        @Override
        public void run(@NotNull ProgressIndicator indicator) {
          progressIndicator.start();
          TooManyUsagesStatus tooManyUsagesStatus = TooManyUsagesStatus.createFor(progressIndicator);

          Collection<Object> foundElements = new ArrayList<>();
          int alreadyFoundCount = cached.size();
          for (SearchEverywhereContributor<?> contributor : contributorsForAdditionalSearch) {
            if (progressIndicator.isCanceled()) break;
            try {
              fetch(contributor, foundElements, alreadyFoundCount, tooManyUsagesStatus);
            }
            catch (ProcessCanceledException ignore) {
              // cancellation of one contributor's fetch should not abort the others
            }
          }
          fillUsages(foundElements, usages, targets);
        }

        // Streams a contributor's elements, skipping already-cached ones and
        // pausing/warning when the usage limit is exceeded.
        <Item> void fetch(SearchEverywhereContributor<Item> contributor,
                          Collection<Object> foundElements,
                          int alreadyFoundCount,
                          TooManyUsagesStatus tooManyUsagesStatus) {
          contributor.fetchElements(searchText, progressIndicator, o -> {
            if (progressIndicator.isCanceled()) {
              return false;
            }

            if (cached.contains(o)) {
              return true;
            }

            foundElements.add(o);
            tooManyUsagesStatus.pauseProcessingIfTooManyUsages();
            if (foundElements.size() + alreadyFoundCount >= UsageLimitUtil.USAGES_LIMIT &&
                tooManyUsagesStatus.switchTooManyUsagesStatus()) {
              int usageCount = foundElements.size() + alreadyFoundCount;
              UsageViewManagerImpl.showTooManyUsagesWarningLater(
                getProject(), tooManyUsagesStatus, progressIndicator, presentation, usageCount, null);
              return !progressIndicator.isCanceled();
            }
            return true;
          });
        }

        @Override
        public void onCancel() {
          progressIndicator.cancel();
        }

        @Override
        public void onSuccess() {
          showInFindWindow(targets, usages, presentation);
        }

        @Override
        public void onThrowable(@NotNull Throwable error) {
          progressIndicator.cancel();
        }
      });
    }
    else {
      showInFindWindow(targets, usages, presentation);
    }
  }

  /**
   * Splits found elements into usages (PSI elements with a text range) and
   * plain targets (PSI elements without one); non-PSI elements are skipped.
   */
  private void fillUsages(Collection<Object> foundElements,
                          Collection<? super Usage> usages,
                          Collection<? super PsiElement> targets) {
    ReadAction.run(() -> foundElements.stream()
      .filter(o -> o instanceof PsiElement)
      .forEach(o -> {
        PsiElement element = (PsiElement)o;
        if (element.getTextRange() != null) {
          UsageInfo usageInfo = new UsageInfo(element);
          usages.add(new UsageInfo2UsageAdapter(usageInfo));
        }
        else {
          targets.add(element);
        }
      }));
  }

  /** Opens the collected targets/usages in the Find tool window. */
  private void showInFindWindow(Collection<? extends PsiElement> targets,
                                Collection<Usage> usages,
                                UsageViewPresentation presentation) {
    UsageTarget[] targetsArray = targets.isEmpty()
                                 ? UsageTarget.EMPTY_ARRAY
                                 : PsiElement2UsageTargetAdapter.convert(PsiUtilCore.toPsiElementArray(targets));
    Usage[] usagesArray = usages.toArray(Usage.EMPTY_ARRAY);
    UsageViewManager.getInstance(myProject).showUsages(targetsArray, usagesArray, presentation);
  }

  @Override
  public void update(@NotNull AnActionEvent e) {
    // Enabled unless the selected tab's single contributor refuses Find-window display.
    SearchEverywhereContributor<?> contributor = mySelectedTab == null ? null : mySelectedTab.contributor;
    e.getPresentation().setEnabled(contributor == null || contributor.showInFindResults());
    e.getPresentation().setIcon(ToolWindowManager.getInstance(myProject).getLocationIcon(ToolWindowId.FIND, AllIcons.General.Pin_tab));
  }
}

/**
 * Toggle showing the contributor-elements filter chooser; "active" (highlighted)
 * while some elements are deselected. Marking/unmarking an element persists the
 * choice and triggers a list rebuild.
 */
static class FiltersAction extends ShowFilterAction {

  final PersistentSearchEverywhereContributorFilter<?> filter;
  final Runnable rebuildRunnable;

  FiltersAction(@NotNull PersistentSearchEverywhereContributorFilter<?> filter,
                @NotNull Runnable rebuildRunnable) {
    this.filter = filter;
    this.rebuildRunnable = rebuildRunnable;
  }

  @Override
  public boolean isEnabled() {
    return true;
  }

  @Override
  protected boolean isActive() {
    return filter.getAllElements().size() != filter.getSelectedElements().size();
  }

  @Override
  protected ElementsChooser<?> createChooser() {
    return createChooser(filter, rebuildRunnable);
  }

  private static <T> ElementsChooser<T> createChooser(@NotNull PersistentSearchEverywhereContributorFilter<T> filter,
                                                      @NotNull Runnable rebuildRunnable) {
    ElementsChooser<T> res = new ElementsChooser<T>(filter.getAllElements(), false) {
      @Override
      protected String getItemText(@NotNull T value) {
        return filter.getElementText(value);
      }

      @Nullable
      @Override
      protected Icon getItemIcon(@NotNull T value) {
        return filter.getElementIcon(value);
      }
    };

    res.markElements(filter.getSelectedElements());
    ElementsChooser.ElementsMarkListener<T> listener = (element, isMarked) -> {
      filter.setSelected(element, isMarked);
      rebuildRunnable.run();
    };
    res.addElementsMarkListener(listener);
    return res;
  }
}

/**
 * Action completing a partially typed command (pattern of the form
 * "&lt;accelerator&gt;&lt;command&gt;") in the search field, logging a
 * COMMAND_COMPLETED event when a completion was applied.
 */
private class CompleteCommandAction extends DumbAwareAction {

  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    if (completeCommand()) {
      FeatureUsageData data = SearchEverywhereUsageTriggerCollector
        .createData(null)
        .addInputEvent(e);
      featureTriggered(SearchEverywhereUsageTriggerCollector.COMMAND_COMPLETED, data);
    }
  }

  @Override
  public void update(@NotNull AnActionEvent e) {
e.getPresentation().setEnabled(getCompleteCommand().isPresent());
  }

  /** Replaces the field text with the suggested command; returns whether anything was completed. */
  private boolean completeCommand() {
    Optional<SearchEverywhereCommandInfo> suggestedCommand = getCompleteCommand();
    if (suggestedCommand.isPresent()) {
      mySearchField.setText(suggestedCommand.get().getCommandWithPrefix() + " ");
      return true;
    }
    return false;
  }

  /**
   * Returns the command to complete to: the one selected in the list if it
   * matches, otherwise the first completion for the typed command part. Empty
   * when the pattern is not a bare (argument-less) command prefix.
   */
  private Optional<SearchEverywhereCommandInfo> getCompleteCommand() {
    String pattern = getSearchPattern();
    String commandPrefix = SearchTopHitProvider.getTopHitAccelerator();
    if (pattern.startsWith(commandPrefix) && !pattern.contains(" ")) {
      String typedCommand = pattern.substring(commandPrefix.length());
      SearchEverywhereCommandInfo command = getSelectedCommand(typedCommand).orElseGet(() -> {
        List<SearchEverywhereCommandInfo> completions = getCommandsForCompletion(getContributorsForCurrentTab(), typedCommand);
        return completions.isEmpty() ? null : completions.get(0);
      });

      return Optional.ofNullable(command);
    }

    return Optional.empty();
  }
}

/** Empty-list text: contributor-specific "nothing found" message, or the generic one for the All tab. */
private String getNotFoundText() {
  return mySelectedTab.getContributor()
    .map(c -> IdeBundle.message("searcheverywhere.nothing.found.for.contributor.anywhere", c.getFullGroupName().toLowerCase(Locale.ROOT)))
    .orElse(IdeBundle.message("searcheverywhere.nothing.found.for.all.anywhere"));
}

/** Forwards a usage-statistics event, with or without additional payload. */
private void featureTriggered(@NotNull String featureID, @Nullable FeatureUsageData data) {
  if (data != null) {
    SearchEverywhereUsageTriggerCollector.trigger(myProject, featureID, data);
  }
  else {
    SearchEverywhereUsageTriggerCollector.trigger(myProject, featureID);
  }
}

private final SearchListener mySearchListener = new SearchListener();

/**
 * Receives incremental results from the searcher: updates the list model while
 * preserving selection, restores the previously selected element (matched by
 * hash) on the first batch, and handles end-of-search bookkeeping.
 */
private class SearchListener implements SESearcher.Listener {

  // Optional hook for tests, invoked with the full result list when a search finishes.
  private Consumer<List<Object>> testCallback;

  @Override
  public void elementsAdded(@NotNull List<? extends SearchEverywhereFoundElementInfo> list) {
    boolean wasEmpty = myListModel.listElements.isEmpty();

    // Lock the tracker so model mutations do not clobber the user's selection.
    mySelectionTracker.lock();
    myListModel.addElements(list);
    mySelectionTracker.unlock();

    mySelectionTracker.restoreSelection();

    // On the first non-empty batch, try to re-select the element chosen last
    // time this tab was used (stored as the element's hash code).
    if (wasEmpty && !myListModel.listElements.isEmpty()) {
      Object prevSelection = ((SearchEverywhereManagerImpl)SearchEverywhereManager.getInstance(myProject))
        .getPrevSelection(getSelectedContributorID());
      if (prevSelection instanceof Integer) {
        for (SearchEverywhereFoundElementInfo info : myListModel.listElements) {
          if (Objects.hashCode(info.element) == ((Integer)prevSelection).intValue()) {
            myResultsList.setSelectedValue(info.element, true);
            break;
          }
        }
      }
    }
  }

  @Override
  public void elementsRemoved(@NotNull List<? extends SearchEverywhereFoundElementInfo> list) {
    list.forEach(info -> myListModel.removeElement(info.getElement(), info.getContributor()));
  }

  @Override
  public void searchFinished(@NotNull Map<SearchEverywhereContributor<?>, Boolean> hasMoreContributors) {
    if (myResultsList.isEmpty() || myListModel.isResultsExpired()) {
      // Nothing found in project scope: automatically widen to "everywhere"
      // once, remembering the pattern so typing can undo it later.
      if (myEverywhereAutoSet && !isEverywhere() && canToggleEverywhere() && !getSearchPattern().isEmpty()) {
        setEverywhereAuto(true);
        myNotFoundString = getSearchPattern();
        return;
      }

      hideHint();
      if (myListModel.isResultsExpired()) {
        myListModel.clear();
      }
    }

    myResultsList.setEmptyText(getSearchPattern().isEmpty() ? "" : getNotFoundText());
    hasMoreContributors.forEach(myListModel::setHasMore);

    mySelectionTracker.resetSelectionIfNeeded();

    if (testCallback != null) testCallback.consume(myListModel.getItems());
  }

  @TestOnly
  void setTestCallback(@Nullable Consumer<List<Object>> callback) {
    testCallback = callback;
  }
}

/**
 * Synthetic contributor backing the command-completion rows: it never fetches
 * anything itself; selecting one of its rows completes the command text in the
 * search field instead of closing the popup.
 */
private final SearchEverywhereContributor<Object> myStubCommandContributor = new SearchEverywhereContributor<Object>() {
  @NotNull
  @Override
  public String getSearchProviderId() {
    return "CommandsContributor";
  }

  @NotNull
  @Override
  public String getGroupName() {
    return IdeBundle.message("searcheverywhere.commands.tab.name");
  }

  @Override
  public int getSortWeight() {
    return 10;
  }

  @Override
  public boolean showInFindResults() {
    return false;
  }

  @Override
  public void fetchElements(@NotNull String pattern,
                            @NotNull ProgressIndicator progressIndicator,
                            @NotNull Processor<? super Object> consumer) {}

  @Override
  public boolean processSelectedItem(@NotNull Object selected, int modifiers, @NotNull String searchText) {
    mySearchField.setText(((SearchEverywhereCommandInfo)selected).getCommandWithPrefix() + " ");
    featureTriggered(SearchEverywhereUsageTriggerCollector.COMMAND_COMPLETED, null);
    return false;
  }

  @NotNull
  @Override
  public ListCellRenderer<? super Object> getElementsRenderer() {
    return myCommandRenderer;
  }

  @Nullable
  @Override
  public Object getDataForItem(@NotNull Object element, @NotNull String dataId) {
    return null;
  }
};
}
platform/lang-impl/src/com/intellij/ide/actions/searcheverywhere/SearchEverywhereUI.java
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.ide.actions.searcheverywhere; import com.google.common.collect.Lists; import com.intellij.find.findUsages.PsiElement2UsageTargetAdapter; import com.intellij.icons.AllIcons; import com.intellij.ide.IdeBundle; import com.intellij.ide.SearchTopHitProvider; import com.intellij.ide.actions.BigPopupUI; import com.intellij.ide.actions.SearchEverywhereClassifier; import com.intellij.ide.actions.bigPopup.ShowFilterAction; import com.intellij.ide.actions.searcheverywhere.statistics.SearchEverywhereUsageTriggerCollector; import com.intellij.ide.actions.searcheverywhere.statistics.SearchFieldStatisticsCollector; import com.intellij.ide.util.ElementsChooser; import com.intellij.ide.util.gotoByName.GotoActionModel; import com.intellij.ide.util.gotoByName.QuickSearchComponent; import com.intellij.ide.util.gotoByName.SearchEverywhereConfiguration; import com.intellij.internal.statistic.eventLog.FeatureUsageData; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.AnActionListener; import com.intellij.openapi.actionSystem.impl.ActionMenu; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.progress.util.ProgressIndicatorBase; import com.intellij.openapi.progress.util.ProgressWindow; import com.intellij.openapi.progress.util.TooManyUsagesStatus; import com.intellij.openapi.project.DumbAwareAction; import 
com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.JBPopup; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Pair; import com.intellij.openapi.wm.ToolWindowId; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.psi.PsiElement; import com.intellij.psi.codeStyle.MinusculeMatcher; import com.intellij.psi.codeStyle.NameUtil; import com.intellij.psi.util.PsiUtilCore; import com.intellij.ui.*; import com.intellij.ui.components.JBList; import com.intellij.ui.components.fields.ExtendableTextComponent; import com.intellij.ui.components.fields.ExtendableTextField; import com.intellij.ui.popup.PopupUpdateProcessor; import com.intellij.ui.scale.JBUIScale; import com.intellij.usageView.UsageInfo; import com.intellij.usages.*; import com.intellij.usages.impl.UsageViewManagerImpl; import com.intellij.util.ArrayUtil; import com.intellij.util.Consumer; import com.intellij.util.Processor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.diff.Diff; import com.intellij.util.diff.FilesTooBigForDiffException; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.text.MatcherHolder; import com.intellij.util.ui.EmptyIcon; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import javax.swing.border.Border; import javax.swing.event.DocumentEvent; import java.awt.*; import java.awt.event.*; import java.util.List; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Future; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; /** * @author Konstantin Bulenkov * @author Mikhail.Sokolov */ 
/**
 * Main UI of the "Search Everywhere" popup: a search field with contributor tabs, a results list fed
 * asynchronously by {@code MultiThreadSearcher}, and a per-tab toolbar of filter actions.
 * Must be used from the EDT (see assertIsDispatchThread in rebuildList).
 */
public final class SearchEverywhereUI extends BigPopupUI implements DataProvider, QuickSearchComponent { private static final Logger LOG = Logger.getInstance(SearchEverywhereUI.class);
  public static final String SEARCH_EVERYWHERE_SEARCH_FILED_KEY = "search-everywhere-textfield"; //only for testing purposes
  // Result caps: a dedicated tab shows up to 30 items per contributor, the "All" tab 15 each.
  public static final int SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT = 30; public static final int MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT = 15;
  // Milliseconds the listener wrapper buffers incoming results before flushing to the UI.
  public static final int THROTTLING_TIMEOUT = 100;
  private static final SimpleTextAttributes SMALL_LABEL_ATTRS = new SimpleTextAttributes( SimpleTextAttributes.STYLE_SMALLER, JBUI.CurrentTheme.BigPopup.listTitleLabelForeground());
  private final List<? extends SearchEverywhereContributor<?>> myShownContributors;
  private SearchListModel myListModel; private SETab mySelectedTab; private final List<SETab> myTabs = new ArrayList<>(); private final Function<String, String> myShortcutSupplier;
  // True while the "everywhere" scope was toggled automatically (not by the user); see searchFinished.
  private boolean myEverywhereAutoSet = true; private String myNotFoundString;
  private JBPopup myHint; private final SESearcher mySearcher; private final ThrottlingListenerWrapper myBufferedListener; private ProgressIndicator mySearchProgressIndicator; private final SEListSelectionTracker mySelectionTracker; private final PersistentSearchEverywhereContributorFilter<String> myContributorsFilter; private ActionToolbar myToolbar;

  // Convenience constructor: no per-tab shortcut tooltips.
  public SearchEverywhereUI(@NotNull Project project, @NotNull List<? extends SearchEverywhereContributor<?>> contributors) { this(project, contributors, s -> null); }

  /**
   * @param contributors     all contributors to query; those marked isShownInSeparateTab also get a tab
   * @param shortcutSupplier maps a contributor ID to a shortcut hint for the tab tooltip (null = none)
   */
  public SearchEverywhereUI(@NotNull Project project, @NotNull List<? extends SearchEverywhereContributor<?>> contributors, @NotNull Function<String, String> shortcutSupplier) { super(project); List<SEResultsEqualityProvider> equalityProviders = SEResultsEqualityProvider.getProviders(); myBufferedListener = new ThrottlingListenerWrapper(THROTTLING_TIMEOUT, mySearchListener, Runnable::run); mySearcher = new MultiThreadSearcher(myBufferedListener, run -> ApplicationManager.getApplication().invokeLater(run), equalityProviders); myShownContributors = contributors; myShortcutSupplier = shortcutSupplier; Map<String, String> namesMap = ContainerUtil.map2Map(contributors, c -> Pair.create(c.getSearchProviderId(), c.getFullGroupName())); myContributorsFilter = new PersistentSearchEverywhereContributorFilter<>( ContainerUtil.map(contributors, c -> c.getSearchProviderId()), SearchEverywhereConfiguration.getInstance(project), namesMap::get, c -> null); init(); initSearchActions(); myResultsList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
    // Collapse multi-selection back to the lead index when any selected row's contributor forbids it.
    myResultsList.addListSelectionListener(e -> { int[] selectedIndices = myResultsList.getSelectedIndices(); if (selectedIndices.length > 1) { boolean multiSelection = Arrays.stream(selectedIndices) .allMatch(i -> myListModel.getContributorForIndex(i).isMultiSelectionSupported()); if (!multiSelection) { int index = myResultsList.getLeadSelectionIndex(); myResultsList.setSelectedIndex(index); } } }); mySelectionTracker = new SEListSelectionTracker(myResultsList, myListModel); myResultsList.addListSelectionListener(mySelectionTracker); Disposer.register(this, SearchFieldStatisticsCollector.createAndStart(mySearchField, myProject)); }

  @Override @NotNull protected CompositeCellRenderer createCellRenderer() { return new CompositeCellRenderer(); }

  // Creates the results list backed by a fresh SearchListModel (also stored in myListModel).
  @NotNull @Override public JBList<Object> createList() { myListModel = new SearchListModel(); addListDataListener(myListModel); return new JBList<>(myListModel); }

  // User-initiated toggle of the "everywhere" scope; clears the auto-set flag first.
  // NOTE(review): continues on the next fragment — the trailing "if" belongs to this method.
  public void toggleEverywhereFilter() { myEverywhereAutoSet = false; if
// NOTE(review): fragment begins inside toggleEverywhereFilter() — the opening "if" is in the previous fragment.
(mySelectedTab.everywhereAction == null) return; if (!mySelectedTab.everywhereAction.canToggleEverywhere()) return; mySelectedTab.everywhereAction.setEverywhere( !mySelectedTab.everywhereAction.isEverywhere()); myToolbar.updateActionsImmediately(); }

// Programmatic toggle (marks the scope as auto-set so searchFinished/textChanged may revert it).
private void setEverywhereAuto(boolean everywhere) { myEverywhereAutoSet = true; if (mySelectedTab.everywhereAction == null) return; if (!mySelectedTab.everywhereAction.canToggleEverywhere()) return; mySelectedTab.everywhereAction.setEverywhere(everywhere); myToolbar.updateActionsImmediately(); }

// Tabs without a toggle action behave as "everywhere on, not toggleable".
private boolean isEverywhere() { if (mySelectedTab.everywhereAction == null) return true; return mySelectedTab.everywhereAction.isEverywhere(); }

private boolean canToggleEverywhere() { if (mySelectedTab.everywhereAction == null) return false; return mySelectedTab.everywhereAction.canToggleEverywhere(); }

/**
 * Selects the tab with the given contributor ID.
 * @throws IllegalArgumentException if no tab carries that ID
 */
public void switchToContributor(@NotNull String contributorID) { SETab selectedTab = myTabs.stream() .filter(tab -> tab.getID().equals(contributorID)) .findAny() .orElseThrow(() -> new IllegalArgumentException(String.format("Contributor %s is not supported", contributorID))); switchToTab(selectedTab); }

// Cyclic tab navigation (wraps around at either end).
private void switchToNextTab() { int currentIndex = myTabs.indexOf(mySelectedTab); SETab nextTab = currentIndex == myTabs.size() - 1 ? myTabs.get(0) : myTabs.get(currentIndex + 1); switchToTab(nextTab); }

private void switchToPrevTab() { int currentIndex = myTabs.indexOf(mySelectedTab); SETab prevTab = currentIndex == 0 ? myTabs.get(myTabs.size() - 1) : myTabs.get(currentIndex - 1); switchToTab(prevTab); }

// Central tab-change routine: resets auto-everywhere, refreshes search-field extensions,
// re-installs the cell renderer when crossing the "All" tab boundary, and restarts the search.
private void switchToTab(SETab tab) { boolean prevTabIsAll = mySelectedTab != null && isAllTabSelected(); mySelectedTab = tab; boolean nextTabIsAll = isAllTabSelected(); if (myEverywhereAutoSet && isEverywhere() && canToggleEverywhere()) { setEverywhereAuto(false); } rebuildSearchFieldExtensions(); if (prevTabIsAll != nextTabIsAll) { //reset cell renderer to show/hide group titles in "All" tab
    myResultsList.setCellRenderer(myResultsList.getCellRenderer()); } if (myToolbar != null) { myToolbar.updateActionsImmediately(); } repaint(); rebuildList(); }

// Shows either the command-hint extension (tab supports commands / "All" tab) or the
// contributor's advertisement text in the search field.
private void rebuildSearchFieldExtensions() { if (mySearchField != null) { Boolean commandsSupported = mySelectedTab.getContributor() .map(contributor -> !contributor.getSupportedCommands().isEmpty()) .orElse(true); if (commandsSupported) { mySearchField.addExtension(hintExtension); } else { mySearchField.removeExtension(hintExtension); } mySearchField.removeExtension(myAdvertisement); if (!commandsSupported) { mySelectedTab.getContributor().map(c -> c.getAdvertisement()). ifPresent(s -> mySearchField.addExtension(myAdvertisement.withText(s))); } } }

private final MyAdvertisement myAdvertisement = new MyAdvertisement();

// Search-field extension that paints a small gray advertisement text as an icon.
private final class MyAdvertisement implements ExtendableTextComponent.Extension { private final TextIcon icon; String message = ""; { icon = new TextIcon(message, JBUI.CurrentTheme.BigPopup.searchFieldGrayForeground(), null, 0); icon.setFont(RelativeFont.SMALL.derive(getFont())); } MyAdvertisement withText(@NotNull String text) { icon.setText(text); return this; } @Override public Icon getIcon(boolean hovered) { return icon; } }

public String getSelectedContributorID() { return mySelectedTab.getID(); }

// Identity of the current selection as an Integer hash (consumed by elementsAdded's restore logic).
// NOTE(review): the ternary completes on the next fragment.
@Nullable public Object getSelectionIdentity() { Object value = myResultsList.getSelectedValue(); return value == null ?
// NOTE(review): fragment begins inside getSelectionIdentity() — this completes its ternary.
null : Objects.hashCode(value); }

@Override public void dispose() { stopSearching(); myListModel.clear(); }

/**
 * DataProvider entry point: serves common keys (pattern, project, PSI array) from the current
 * selection, then falls back to asking each selected element's contributor; "...more" rows are skipped.
 */
@Nullable @Override public Object getData(@NotNull String dataId) { IntStream indicesStream = Arrays.stream(myResultsList.getSelectedIndices()) .filter(i -> !myListModel.isMoreElement(i)); //common data section---------------------
  if (PlatformDataKeys.PREDEFINED_TEXT.is(dataId)) { return getSearchPattern(); } if (CommonDataKeys.PROJECT.is(dataId)) { return myProject; } if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) { List<PsiElement> elements = indicesStream.mapToObj(i -> { SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(i); Object item = myListModel.getElementAt(i); Object psi = contributor.getDataForItem(item, CommonDataKeys.PSI_ELEMENT.getName()); return (PsiElement)psi; }) .filter(Objects::nonNull) .collect(Collectors.toList()); return PsiUtilCore.toPsiElementArray(elements); } //item-specific data section--------------
  return indicesStream.mapToObj(i -> { SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(i); Object item = myListModel.getElementAt(i); return contributor.getDataForItem(item, dataId); }) .filter(Objects::nonNull) .findFirst() .orElse(null); }

// QuickSearchComponent contract: track the companion hint popup, cancelling any previous one.
@Override public void registerHint(@NotNull JBPopup h) { if (myHint != null && myHint.isVisible() && myHint != h) { myHint.cancel(); } myHint = h; }

@Override public void unregisterHint() { myHint = null; }

private void hideHint() { if (myHint != null && myHint.isVisible()) { myHint.cancel(); } }

// Pushes the newly selected element into the hint popup's PopupUpdateProcessor, if present.
private void updateHint(Object element) { if (myHint == null || !myHint.isVisible()) return; final PopupUpdateProcessor updateProcessor = myHint.getUserData(PopupUpdateProcessor.class); if (updateProcessor != null) { updateProcessor.updatePopup(element); } }

private boolean isAllTabSelected() { return SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID.equals(getSelectedContributorID()); }

// Builds the top-right toolbar: the selected tab's filter actions plus "open in Find tool window".
@Override @NotNull protected JPanel createSettingsPanel() { DefaultActionGroup actionGroup = new DefaultActionGroup(); actionGroup.addAction(new ActionGroup() { @Override public AnAction @NotNull [] getChildren(@Nullable AnActionEvent e) { if (e == null || mySelectedTab == null) return EMPTY_ARRAY; return mySelectedTab.actions.toArray(EMPTY_ARRAY); } }); actionGroup.addAction(new ShowInFindToolWindowAction()); myToolbar = ActionManager.getInstance().createActionToolbar("search.everywhere.toolbar", actionGroup, true); myToolbar.setLayoutPolicy(ActionToolbar.NOWRAP_LAYOUT_POLICY); myToolbar.updateActionsImmediately(); JComponent toolbarComponent = myToolbar.getComponent(); toolbarComponent.setOpaque(false); toolbarComponent.setBorder(JBUI.Borders.empty(2, 18, 2, 9)); return (JPanel)toolbarComponent; }

@NotNull @Override protected String getInitialHint() { return IdeBundle.message("searcheverywhere.history.shortcuts.hint", KeymapUtil.getKeystrokeText(SearchTextField.ALT_SHOW_HISTORY_KEYSTROKE), KeymapUtil.getKeystrokeText(SearchTextField.SHOW_HISTORY_KEYSTROKE)); }

@Override protected @NotNull String getAccessibleName() { return IdeBundle.message("searcheverywhere.accessible.name"); }

// Search field with a magnifier icon on the left; tagged with the test-only client property.
@NotNull @Override protected ExtendableTextField createSearchField() { SearchField res = new SearchField() { @NotNull @Override protected Extension getLeftExtension() { return new Extension() { @Override public Icon getIcon(boolean hovered) { return AllIcons.Actions.Search; } @Override public boolean isIconBeforeText() { return true; } @Override public int getIconGap() { return JBUIScale.scale(10); } }; } }; res.putClientProperty(SEARCH_EVERYWHERE_SEARCH_FILED_KEY, true); return res; }

@Override protected void installScrollingActions() { ScrollingUtil.installMoveUpAction(myResultsList, getSearchField()); ScrollingUtil.installMoveDownAction(myResultsList, getSearchField()); }

// Builds the tab strip: the "All" tab first, then one tab per contributor shown separately.
// NOTE(review): continues on the next fragment.
@Override @NotNull protected JPanel createTopLeftPanel() { JPanel contributorsPanel = new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0));
// NOTE(review): fragment begins inside createTopLeftPanel() — panel construction continues here.
contributorsPanel.setOpaque(false); SETab allTab = new SETab(null); contributorsPanel.add(allTab); myTabs.add(allTab); myShownContributors.stream() .filter(SearchEverywhereContributor::isShownInSeparateTab) .forEach(contributor -> { SETab tab = new SETab(contributor); contributorsPanel.add(tab); myTabs.add(tab); }); return contributorsPanel; }

/**
 * One tab of the popup. {@code contributor == null} means the "All" tab, whose everywhere toggle is
 * backed by the manager and fanned out to every other tab; otherwise the tab wraps a single
 * contributor and its filter actions.
 */
private class SETab extends JLabel { final SearchEverywhereContributor<?> contributor; final List<AnAction> actions;
  // The toggle found among the actions, or null when the tab has none (then the tab is "always everywhere").
  final SearchEverywhereToggleAction everywhereAction;
  SETab(@Nullable SearchEverywhereContributor<?> contributor) { super(contributor == null ? IdeBundle.message("searcheverywhere.allelements.tab.name") : contributor.getGroupName()); this.contributor = contributor; updateTooltip(); Runnable onChanged = () -> { myToolbar.updateActionsImmediately(); rebuildList(); }; if (contributor == null) { String actionText = IdeUICustomization.getInstance().projectMessage("checkbox.include.non.project.items"); actions = Arrays.asList(new CheckBoxSearchEverywhereToggleAction(actionText) { final SearchEverywhereManagerImpl seManager = (SearchEverywhereManagerImpl)SearchEverywhereManager.getInstance(myProject); @Override public boolean isEverywhere() { return seManager.isEverywhere(); } @Override public void setEverywhere(boolean state) { seManager.setEverywhere(state);
        // NOTE(review): this dereferences tab.everywhereAction without a null check; tabs whose
        // contributor supplies no toggle action would NPE here — confirm every separate-tab
        // contributor provides a SearchEverywhereToggleAction.
        myTabs.stream() .filter(tab -> tab != SETab.this) .forEach(tab -> tab.everywhereAction.setEverywhere(state)); onChanged.run(); } }, new FiltersAction(myContributorsFilter, onChanged)); } else { actions = new ArrayList<>(contributor.getActions(onChanged)); } everywhereAction = (SearchEverywhereToggleAction)ContainerUtil.find(actions, o -> o instanceof SearchEverywhereToggleAction); Insets insets = JBUI.CurrentTheme.BigPopup.tabInsets(); setBorder(JBUI.Borders.empty(insets.top, insets.left, insets.bottom, insets.right));
    // Clicking the label switches to this tab and reports the switch to usage statistics.
    addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { switchToTab(SETab.this); String reportableID = getContributor() .map(SearchEverywhereUsageTriggerCollector::getReportableContributorID) .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID); FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(reportableID) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.TAB_SWITCHED, data); } }); }

  private void updateTooltip() { String shortcut = myShortcutSupplier.apply(getID()); if (shortcut != null) { setToolTipText(shortcut); } }

  // Contributor ID of this tab, or the shared "All" group ID.
  public String getID() { return getContributor() .map(SearchEverywhereContributor::getSearchProviderId) .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID); }

  public Optional<SearchEverywhereContributor<?>> getContributor() { return Optional.ofNullable(contributor); }

  @Override public Dimension getPreferredSize() { Dimension size = super.getPreferredSize(); size.height = JBUIScale.scale(29); return size; }

  // Selected tab is painted opaque with the theme's selected-tab colors.
  @Override public boolean isOpaque() { return mySelectedTab == this; }
  @Override public Color getBackground() { return mySelectedTab == this ? JBUI.CurrentTheme.BigPopup.selectedTabColor() : super.getBackground(); }
  @Override public Color getForeground() { return mySelectedTab == this ? JBUI.CurrentTheme.BigPopup.selectedTabTextColor() : super.getForeground(); } }

// Cancels any running search and starts a fresh one for the current pattern/tab. EDT-only.
// NOTE(review): continues on the next fragment.
private void rebuildList() { ApplicationManager.getApplication().assertIsDispatchThread(); stopSearching(); myResultsList.setEmptyText(IdeBundle.message("label.choosebyname.searching")); String rawPattern = getSearchPattern(); updateViewType(rawPattern.isEmpty() ?
// NOTE(review): fragment begins inside rebuildList() — completes the updateViewType ternary.
ViewType.SHORT : ViewType.FULL); String namePattern = mySelectedTab.getContributor() .map(contributor -> contributor.filterControlSymbols(rawPattern)) .orElse(rawPattern);
// Matcher tries the raw pattern first, falls back to the contributor-filtered one; stored on the list
// for the renderers to highlight matched fragments.
MinusculeMatcher matcher = NameUtil.buildMatcherWithFallback("*" + rawPattern, "*" + namePattern, NameUtil.MatchingCaseSensitivity.NONE); MatcherHolder.associateMatcher(myResultsList, matcher);
// Contributor -> element limit: a dedicated tab gets the larger single-contributor cap,
// the "All" tab spreads the smaller cap over every enabled contributor.
Map<SearchEverywhereContributor<?>, Integer> contributorsMap = new HashMap<>(); Optional<SearchEverywhereContributor<?>> selectedContributor = mySelectedTab.getContributor(); if (selectedContributor.isPresent()) { contributorsMap.put(selectedContributor.get(), SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT); } else { contributorsMap.putAll(getAllTabContributors().stream().collect(Collectors.toMap(c -> c, c -> MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT))); }
// During indexing (dumb mode) drop non-dumb-aware contributors and explain what's missing.
List<SearchEverywhereContributor<?>> contributors = DumbService.getInstance(myProject).filterByDumbAwareness(contributorsMap.keySet()); if (contributors.isEmpty() && DumbService.isDumb(myProject)) { myResultsList.setEmptyText(IdeBundle.message("searcheverywhere.indexing.mode.not.supported", mySelectedTab.getText(), ApplicationNamesInfo.getInstance().getFullProductName())); myListModel.clear(); return; } if (contributors.size() != contributorsMap.size()) { myResultsList.setEmptyText(IdeBundle.message("searcheverywhere.indexing.incomplete.results", mySelectedTab.getText(), ApplicationNamesInfo.getInstance().getFullProductName())); } myListModel.expireResults(); contributors.forEach(contributor -> myListModel.setHasMore(contributor, false));
// Command handling: with only a command typed, show completion items; once a space follows the
// command, restrict the search to the contributors supporting that command.
String commandPrefix = SearchTopHitProvider.getTopHitAccelerator(); if (rawPattern.startsWith(commandPrefix)) { String typedCommand = rawPattern.split(" ")[0].substring(commandPrefix.length()); List<SearchEverywhereCommandInfo> commands = getCommandsForCompletion(contributors, typedCommand); if (!commands.isEmpty()) { if (rawPattern.contains(" ")) { contributorsMap.keySet().retainAll(commands.stream() .map(SearchEverywhereCommandInfo::getContributor) .collect(Collectors.toSet())); } else { myListModel.clear(); List<SearchEverywhereFoundElementInfo> lst = ContainerUtil.map( commands, command -> new SearchEverywhereFoundElementInfo(command, 0, myStubCommandContributor)); myListModel.addElements(lst); ScrollingUtil.ensureSelectionExists(myResultsList); } } } mySearchProgressIndicator = mySearcher.search(contributorsMap, rawPattern); }

// Wires mouse handling, tab-switch shortcuts, group navigation, escape, document/selection
// listeners and message-bus subscriptions. Called once from the constructor.
private void initSearchActions() { MouseAdapter listMouseListener = new MouseAdapter() { private int currentDescriptionIndex = -1; @Override public void mouseClicked(MouseEvent e) { onMouseClicked(e); } @Override public void mouseMoved(MouseEvent e) { int index = myResultsList.locationToIndex(e.getPoint()); indexChanged(index); } @Override public void mouseExited(MouseEvent e) { int index = myResultsList.getSelectedIndex(); indexChanged(index); } private void indexChanged(int index) { if (index != currentDescriptionIndex) { currentDescriptionIndex = index; showDescriptionForIndex(index); } } }; myResultsList.addMouseMotionListener(listMouseListener); myResultsList.addMouseListener(listMouseListener); ScrollingUtil.redirectExpandSelection(myResultsList, mySearchField); Consumer<AnActionEvent> nextTabAction = e -> { switchToNextTab(); triggerTabSwitched(e); }; Consumer<AnActionEvent> prevTabAction = e -> { switchToPrevTab(); triggerTabSwitched(e); }; registerAction(SearchEverywhereActions.AUTOCOMPLETE_COMMAND, CompleteCommandAction::new); registerAction(SearchEverywhereActions.SWITCH_TO_NEXT_TAB, nextTabAction); registerAction(SearchEverywhereActions.SWITCH_TO_PREV_TAB, prevTabAction); registerAction(IdeActions.ACTION_NEXT_TAB, nextTabAction); registerAction(IdeActions.ACTION_PREVIOUS_TAB, prevTabAction); registerAction(IdeActions.ACTION_SWITCHER, e -> { if (e.getInputEvent().isShiftDown()) { switchToPrevTab(); } else { switchToNextTab(); } triggerTabSwitched(e); });
// NOTE(review): the next registration's lambda continues on the following fragment.
registerAction(SearchEverywhereActions.NAVIGATE_TO_NEXT_GROUP, e -> {
// NOTE(review): fragment begins inside initSearchActions(), completing the NAVIGATE_TO_NEXT_GROUP lambda.
fetchGroups(true); FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(null) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.GROUP_NAVIGATE, data); }); registerAction(SearchEverywhereActions.NAVIGATE_TO_PREV_GROUP, e -> { fetchGroups(false); FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(null) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.GROUP_NAVIGATE, data); }); registerSelectItemAction();
// Escape closes the popup; reuse the editor's escape shortcut when available.
AnAction escape = ActionManager.getInstance().getAction("EditorEscape"); DumbAwareAction.create(__ -> closePopup()) .registerCustomShortcutSet(escape == null ? CommonShortcuts.ESCAPE : escape.getShortcutSet(), this);
// Every edit restarts the search; typing away from a not-found pattern also reverts auto-everywhere.
mySearchField.getDocument().addDocumentListener(new DocumentAdapter() { @Override protected void textChanged(@NotNull DocumentEvent e) { String newSearchString = getSearchPattern(); if (myNotFoundString != null) { boolean newPatternContainsPrevious = myNotFoundString.length() > 1 && newSearchString.contains(myNotFoundString); if (myEverywhereAutoSet && isEverywhere() && canToggleEverywhere() && !newPatternContainsPrevious) { myNotFoundString = null; setEverywhereAuto(false); return; } } rebuildList(); } });
// Selection changes refresh the companion hint popup and the status-bar description.
myResultsList.addListSelectionListener(e -> { Object selectedValue = myResultsList.getSelectedValue(); if (selectedValue != null && myHint != null && myHint.isVisible()) { updateHint(selectedValue); } showDescriptionForIndex(myResultsList.getSelectedIndex()); }); MessageBusConnection projectBusConnection = myProject.getMessageBus().connect(this);
// Leaving dumb mode re-enables the full contributor set.
projectBusConnection.subscribe(DumbService.DUMB_MODE, new DumbService.DumbModeListener() { @Override public void exitDumbMode() { ApplicationManager.getApplication().invokeLater(() -> { rebuildSearchFieldExtensions(); rebuildList(); }); } });
// A user-triggered everywhere toggle (has an input event) cancels auto management for this session.
projectBusConnection.subscribe(AnActionListener.TOPIC, new AnActionListener() { @Override public void afterActionPerformed(@NotNull AnAction action, @NotNull DataContext dataContext, @NotNull AnActionEvent event) { if (action == mySelectedTab.everywhereAction && event.getInputEvent() != null) { myEverywhereAutoSet = false; } } }); ApplicationManager.getApplication() .getMessageBus() .connect(this) .subscribe(ProgressWindow.TOPIC, pw -> Disposer.register(pw, () -> myResultsList.repaint()));
// Close the popup when focus leaves it for anything other than our own hint.
mySearchField.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent e) { Component oppositeComponent = e.getOppositeComponent(); if (!isHintComponent(oppositeComponent) && !UIUtil.haveCommonOwner(SearchEverywhereUI.this, oppositeComponent)) { closePopup(); } } }); }

// Shows the element's string description (if the contributor provides one) in the status bar.
private void showDescriptionForIndex(int index) { if (index >= 0 && !myListModel.isMoreElement(index)) { SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(index); Object data = contributor.getDataForItem( myListModel.getElementAt(index), SearchEverywhereDataKeys.ITEM_STRING_DESCRIPTION.getName()); if (data instanceof String) { ActionMenu.showDescriptionInStatusBar(true, myResultsList, (String)data); } } }

// Binds the supplied action to the shortcut set of the registered action with the given ID (if any).
private void registerAction(String actionID, Supplier<? extends AnAction> actionSupplier) { Optional.ofNullable(ActionManager.getInstance().getAction(actionID)) .map(a -> a.getShortcutSet()) .ifPresent(shortcuts -> actionSupplier.get().registerCustomShortcutSet(shortcuts, this, this)); }

private void registerAction(String actionID, Consumer<? super AnActionEvent> action) { registerAction(actionID, () -> DumbAwareAction.create(action)); }

// when user adds shortcut for "select item" we should add shortcuts
// with all possible modifiers (Ctrl, Shift, Alt, etc.)
private void registerSelectItemAction() { int[] allowedModifiers = new int[]{ 0, InputEvent.SHIFT_MASK, InputEvent.CTRL_MASK, InputEvent.META_MASK, InputEvent.ALT_MASK }; ShortcutSet selectShortcuts = ActionManager.getInstance().getAction(SearchEverywhereActions.SELECT_ITEM).getShortcutSet(); Collection<KeyboardShortcut> keyboardShortcuts = Arrays.stream(selectShortcuts.getShortcuts()) .filter(shortcut -> shortcut instanceof KeyboardShortcut) .map(shortcut -> (KeyboardShortcut)shortcut) .collect(Collectors.toList()); for (int modifiers : allowedModifiers) { Collection<Shortcut> newShortcuts = new ArrayList<>(); for (KeyboardShortcut shortcut : keyboardShortcuts) { boolean hasSecondStroke = shortcut.getSecondKeyStroke() != null; KeyStroke originalStroke = hasSecondStroke ? shortcut.getSecondKeyStroke() : shortcut.getFirstKeyStroke(); if ((originalStroke.getModifiers() & modifiers) != 0) continue; KeyStroke newStroke = KeyStroke.getKeyStroke(originalStroke.getKeyCode(), originalStroke.getModifiers() | modifiers); newShortcuts.add(hasSecondStroke ? 
new KeyboardShortcut(shortcut.getFirstKeyStroke(), newStroke) : new KeyboardShortcut(newStroke, null)); } if (newShortcuts.isEmpty()) continue; ShortcutSet newShortcutSet = new CustomShortcutSet(newShortcuts.toArray(Shortcut.EMPTY_ARRAY)); DumbAwareAction.create(event -> { int[] indices = myResultsList.getSelectedIndices(); elementsSelected(indices, modifiers); }).registerCustomShortcutSet(newShortcutSet, this, this); } } private void triggerTabSwitched(AnActionEvent e) { String id = mySelectedTab.getContributor() .map(SearchEverywhereUsageTriggerCollector::getReportableContributorID) .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID); FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(id) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.TAB_SWITCHED, data); } private void fetchGroups(boolean down) { int index = myResultsList.getSelectedIndex(); do { index += down ? 1 : -1; } while (index >= 0 && index < myListModel.getSize() && !myListModel.isGroupFirstItem(index) && !myListModel.isMoreElement(index)); if (index >= 0 && index < myListModel.getSize()) { myResultsList.setSelectedIndex(index); ScrollingUtil.ensureIndexIsVisible(myResultsList, index, 0); } } private Optional<SearchEverywhereCommandInfo> getSelectedCommand(String typedCommand) { int index = myResultsList.getSelectedIndex(); if (index < 0) return Optional.empty(); SearchEverywhereContributor contributor = myListModel.getContributorForIndex(index); if (contributor != myStubCommandContributor) return Optional.empty(); SearchEverywhereCommandInfo selectedCommand = (SearchEverywhereCommandInfo)myListModel.getElementAt(index); return selectedCommand.getCommand().contains(typedCommand) ? Optional.of(selectedCommand) : Optional.empty(); } @NotNull private static List<SearchEverywhereCommandInfo> getCommandsForCompletion(Collection<? 
extends SearchEverywhereContributor<?>> contributors, String enteredCommandPart) { Comparator<SearchEverywhereCommandInfo> cmdComparator = (cmd1, cmd2) -> { String cmdName1 = cmd1.getCommand(); String cmdName2 = cmd2.getCommand(); if (!enteredCommandPart.isEmpty()) { if (cmdName1.startsWith(enteredCommandPart) && !cmdName2.startsWith(enteredCommandPart)) return -1; if (!cmdName1.startsWith(enteredCommandPart) && cmdName2.startsWith(enteredCommandPart)) return 1; } return String.CASE_INSENSITIVE_ORDER.compare(cmdName1, cmd2.getCommand()); }; return contributors.stream() .flatMap(contributor -> contributor.getSupportedCommands().stream()) .filter(command -> command.getCommand().contains(enteredCommandPart)) .sorted(cmdComparator) .collect(Collectors.toList()); } private void onMouseClicked(@NotNull MouseEvent e) { boolean multiSelectMode = e.isShiftDown() || UIUtil.isControlKeyDown(e); if (e.getButton() == MouseEvent.BUTTON1 && !multiSelectMode) { e.consume(); final int i = myResultsList.locationToIndex(e.getPoint()); if (i > -1) { myResultsList.setSelectedIndex(i); elementsSelected(new int[]{i}, e.getModifiers()); } } } private boolean isHintComponent(Component component) { if (myHint != null && !myHint.isDisposed() && component != null) { return SwingUtilities.isDescendingFrom(component, myHint.getContent()); } return false; } private void elementsSelected(int[] indexes, int modifiers) { if (indexes.length == 1 && myListModel.isMoreElement(indexes[0])) { SearchEverywhereContributor contributor = myListModel.getContributorForIndex(indexes[0]); showMoreElements(contributor); return; } indexes = Arrays.stream(indexes) .filter(i -> !myListModel.isMoreElement(i)) .toArray(); String searchText = getSearchPattern(); if (searchText.startsWith(SearchTopHitProvider.getTopHitAccelerator()) && searchText.contains(" ")) { featureTriggered(SearchEverywhereUsageTriggerCollector.COMMAND_USED, null); } boolean closePopup = false; for (int i : indexes) { 
SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(i); Object value = myListModel.getElementAt(i); String selectedTabContributorID = mySelectedTab.getContributor() .map(SearchEverywhereUsageTriggerCollector::getReportableContributorID) .orElse(SearchEverywhereManagerImpl.ALL_CONTRIBUTORS_GROUP_ID); String reportableContributorID = SearchEverywhereUsageTriggerCollector.getReportableContributorID(contributor); FeatureUsageData data = SearchEverywhereUsageTriggerCollector.createData(reportableContributorID, selectedTabContributorID, i); if (value instanceof PsiElement) { data.addLanguage(((PsiElement) value).getLanguage()); } featureTriggered(SearchEverywhereUsageTriggerCollector.CONTRIBUTOR_ITEM_SELECTED, data); closePopup |= contributor.processSelectedItem(value, modifiers, searchText); } if (closePopup) { closePopup(); } else { ApplicationManager.getApplication().invokeLater(() -> myResultsList.repaint()); } } private void showMoreElements(SearchEverywhereContributor contributor) { featureTriggered(SearchEverywhereUsageTriggerCollector.MORE_ITEM_SELECTED, null); Map<SearchEverywhereContributor<?>, Collection<SearchEverywhereFoundElementInfo>> found = myListModel.getFoundElementsMap(); int limit = myListModel.getItemsForContributor(contributor) + (mySelectedTab.getContributor().isPresent() ? 
SINGLE_CONTRIBUTOR_ELEMENTS_LIMIT : MULTIPLE_CONTRIBUTORS_ELEMENTS_LIMIT); mySearchProgressIndicator = mySearcher.findMoreItems(found, getSearchPattern(), contributor, limit); } private void stopSearching() { if (mySearchProgressIndicator != null && !mySearchProgressIndicator.isCanceled()) { mySearchProgressIndicator.cancel(); } if (myBufferedListener != null) { myBufferedListener.clearBuffer(); } } private void closePopup() { ActionMenu.showDescriptionInStatusBar(true, myResultsList, null); stopSearching(); searchFinishedHandler.run(); } @NotNull private List<SearchEverywhereContributor<?>> getAllTabContributors() { return ContainerUtil.filter(myShownContributors, contributor -> myContributorsFilter.isSelected(contributor.getSearchProviderId())); } @NotNull private Collection<SearchEverywhereContributor<?>> getContributorsForCurrentTab() { return isAllTabSelected() ? getAllTabContributors() : Collections.singleton(mySelectedTab.getContributor().get()); } @TestOnly public Future<List<Object>> findElementsForPattern(String pattern) { CompletableFuture<List<Object>> future = new CompletableFuture<>(); mySearchListener.setTestCallback(list -> { future.complete(list); mySearchListener.setTestCallback(null); }); mySearchField.setText(pattern); return future; } private class CompositeCellRenderer implements ListCellRenderer<Object> { @Override public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) { if (value == SearchListModel.MORE_ELEMENT) { Component component = myMoreRenderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); component.setPreferredSize(UIUtil.updateListRowHeight(component.getPreferredSize())); return component; } SearchEverywhereContributor<Object> contributor = myListModel.getContributorForIndex(index); Component component = SearchEverywhereClassifier.EP_Manager.getListCellRendererComponent( list, value, index, isSelected, cellHasFocus); if (component 
== null) { component = contributor.getElementsRenderer().getListCellRendererComponent( list, value, index, isSelected, true); } if (component instanceof JComponent) { Border border = ((JComponent)component).getBorder(); if (border != GotoActionModel.GotoActionListCellRenderer.TOGGLE_BUTTON_BORDER) { ((JComponent)component).setBorder(JBUI.Borders.empty(1, 2)); } } AppUIUtil.targetToDevice(component, list); component.setPreferredSize(UIUtil.updateListRowHeight(component.getPreferredSize())); if (isAllTabSelected() && myListModel.isGroupFirstItem(index)) { component = myGroupTitleRenderer.withDisplayedData(contributor.getFullGroupName(), component); } return component; } } private final ListCellRenderer<Object> myCommandRenderer = new ColoredListCellRenderer<Object>() { @Override protected void customizeCellRenderer(@NotNull JList<?> list, Object value, int index, boolean selected, boolean hasFocus) { setPaintFocusBorder(false); setIcon(EmptyIcon.ICON_16); setFont(list.getFont()); SearchEverywhereCommandInfo command = (SearchEverywhereCommandInfo)value; append(command.getCommandWithPrefix() + " ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, list.getForeground())); append(command.getDefinition(), new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, JBColor.GRAY)); setBackground(UIUtil.getListBackground(selected)); } }; private final ListCellRenderer<Object> myMoreRenderer = new ColoredListCellRenderer<Object>() { @Override protected int getMinHeight() { return -1; } @Override protected void customizeCellRenderer(@NotNull JList<?> list, Object value, int index, boolean selected, boolean hasFocus) { if (value != SearchListModel.MORE_ELEMENT) { throw new AssertionError(value); } setFont(UIUtil.getLabelFont().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.SMALL))); append(IdeBundle.message("search.everywhere.points.more"), SMALL_LABEL_ATTRS); setIpad(JBInsets.create(1, 7)); setMyBorder(null); } }; private final GroupTitleRenderer myGroupTitleRenderer = 
new GroupTitleRenderer(); private static class GroupTitleRenderer extends CellRendererPanel { final SimpleColoredComponent titleLabel = new SimpleColoredComponent(); GroupTitleRenderer() { setLayout(new BorderLayout()); SeparatorComponent separatorComponent = new SeparatorComponent( titleLabel.getPreferredSize().height / 2, JBUI.CurrentTheme.BigPopup.listSeparatorColor(), null); JPanel topPanel = JBUI.Panels.simplePanel(5, 0) .addToCenter(separatorComponent) .addToLeft(titleLabel) .withBorder(JBUI.Borders.empty(1, 7)) .withBackground(UIUtil.getListBackground()); add(topPanel, BorderLayout.NORTH); } public GroupTitleRenderer withDisplayedData(String title, Component itemContent) { titleLabel.clear(); titleLabel.append(title, SMALL_LABEL_ATTRS); Component prevContent = ((BorderLayout)getLayout()).getLayoutComponent(BorderLayout.CENTER); if (prevContent != null) { remove(prevContent); } add(itemContent, BorderLayout.CENTER); accessibleContext = itemContent.getAccessibleContext(); return this; } } public static class SearchListModel extends AbstractListModel<Object> { static final Object MORE_ELEMENT = new Object(); private final List<SearchEverywhereFoundElementInfo> listElements = new ArrayList<>(); private boolean resultsExpired = false; public boolean isResultsExpired() { return resultsExpired; } public void expireResults() { resultsExpired = true; } @Override public int getSize() { return listElements.size(); } @Override public Object getElementAt(int index) { return listElements.get(index).getElement(); } public List<Object> getItems() { return new ArrayList<>(values()); } public Collection<Object> getFoundItems(SearchEverywhereContributor contributor) { return listElements.stream() .filter(info -> info.getContributor() == contributor && info.getElement() != MORE_ELEMENT) .map(info -> info.getElement()) .collect(Collectors.toList()); } public boolean hasMoreElements(SearchEverywhereContributor contributor) { return listElements.stream() .anyMatch(info -> 
info.getElement() == MORE_ELEMENT && info.getContributor() == contributor); } public void addElements(List<? extends SearchEverywhereFoundElementInfo> items) { if (items.isEmpty()) { return; } Map<SearchEverywhereContributor<?>, List<SearchEverywhereFoundElementInfo>> itemsMap = new HashMap<>(); items.forEach(info -> { List<SearchEverywhereFoundElementInfo> list = itemsMap.computeIfAbsent(info.getContributor(), contributor -> new ArrayList<>()); list.add(info); }); itemsMap.forEach((contributor, list) -> list.sort(Comparator.comparingInt(SearchEverywhereFoundElementInfo::getPriority).reversed())); if (resultsExpired) { retainContributors(itemsMap.keySet()); clearMoreItems(); itemsMap.forEach((contributor, list) -> { Object[] oldItems = ArrayUtil.toObjectArray(getFoundItems(contributor)); Object[] newItems = list.stream() .map(SearchEverywhereFoundElementInfo::getElement) .toArray(); try { Diff.Change change = Diff.buildChanges(oldItems, newItems); applyChange(change, contributor, list); } catch (FilesTooBigForDiffException e) { LOG.error("Cannot calculate diff for updated search results"); } }); resultsExpired = false; } else { itemsMap.forEach((contributor, list) -> { int startIndex = contributors().indexOf(contributor); int insertionIndex = getInsertionPoint(contributor); int endIndex = insertionIndex + list.size() - 1; listElements.addAll(insertionIndex, list); fireIntervalAdded(this, insertionIndex, endIndex); // there were items for this contributor before update if (startIndex >= 0) { listElements.subList(startIndex, endIndex + 1) .sort(Comparator.comparingInt(SearchEverywhereFoundElementInfo::getPriority).reversed()); fireContentsChanged(this, startIndex, endIndex); } }); } } private void retainContributors(Collection<SearchEverywhereContributor<?>> retainContributors) { Iterator<SearchEverywhereFoundElementInfo> iterator = listElements.iterator(); int startInterval = 0; int endInterval = -1; while (iterator.hasNext()) { SearchEverywhereFoundElementInfo item 
= iterator.next(); if (retainContributors.contains(item.getContributor())) { if (startInterval <= endInterval) { fireIntervalRemoved(this, startInterval, endInterval); startInterval = endInterval + 2; } else { startInterval++; } } else { iterator.remove(); } endInterval++; } if (startInterval <= endInterval) { fireIntervalRemoved(this, startInterval, endInterval); } } private void clearMoreItems() { ListIterator<SearchEverywhereFoundElementInfo> iterator = listElements.listIterator(); while (iterator.hasNext()) { int index = iterator.nextIndex(); if (iterator.next().getElement() == MORE_ELEMENT) { iterator.remove(); fireContentsChanged(this, index, index); } } } private void applyChange(Diff.Change change, SearchEverywhereContributor<?> contributor, List<SearchEverywhereFoundElementInfo> newItems) { int firstItemIndex = contributors().indexOf(contributor); if (firstItemIndex < 0) { firstItemIndex = getInsertionPoint(contributor); } for (Diff.Change ch : toRevertedList(change)) { if (ch.deleted > 0) { for (int i = ch.deleted - 1; i >= 0; i--) { int index = firstItemIndex + ch.line0 + i; listElements.remove(index); } fireIntervalRemoved(this, firstItemIndex + ch.line0, firstItemIndex + ch.line0 + ch.deleted - 1); } if (ch.inserted > 0) { List<SearchEverywhereFoundElementInfo> addedItems = newItems.subList(ch.line1, ch.line1 + ch.inserted); listElements.addAll(firstItemIndex + ch.line0, addedItems); fireIntervalAdded(this, firstItemIndex + ch.line0, firstItemIndex + ch.line0 + ch.inserted - 1); } } } private static List<Diff.Change> toRevertedList(Diff.Change change) { List<Diff.Change> res = new ArrayList<>(); while (change != null) { res.add(0, change); change = change.link; } return res; } public void removeElement(@NotNull Object item, SearchEverywhereContributor contributor) { int index = contributors().indexOf(contributor); if (index < 0) { return; } while (index < listElements.size() && listElements.get(index).getContributor() == contributor) { if 
(item.equals(listElements.get(index).getElement())) { listElements.remove(index); fireIntervalRemoved(this, index, index); return; } index++; } } public void setHasMore(SearchEverywhereContributor<?> contributor, boolean newVal) { int index = contributors().lastIndexOf(contributor); if (index < 0) { return; } boolean alreadyHas = isMoreElement(index); if (alreadyHas && !newVal) { listElements.remove(index); fireIntervalRemoved(this, index, index); } if (!alreadyHas && newVal) { index += 1; listElements.add(index, new SearchEverywhereFoundElementInfo(MORE_ELEMENT, 0, contributor)); fireIntervalAdded(this, index, index); } } public void clear() { int index = listElements.size() - 1; listElements.clear(); if (index >= 0) { fireIntervalRemoved(this, 0, index); } } public boolean contains(Object val) { return values().contains(val); } public boolean isMoreElement(int index) { return listElements.get(index).getElement() == MORE_ELEMENT; } public <Item> SearchEverywhereContributor<Item> getContributorForIndex(int index) { //noinspection unchecked return (SearchEverywhereContributor<Item>)listElements.get(index).getContributor(); } public boolean isGroupFirstItem(int index) { return index == 0 || listElements.get(index).getContributor() != listElements.get(index - 1).getContributor(); } public int getItemsForContributor(SearchEverywhereContributor<?> contributor) { List<SearchEverywhereContributor> contributorsList = contributors(); int first = contributorsList.indexOf(contributor); int last = contributorsList.lastIndexOf(contributor); if (isMoreElement(last)) { last -= 1; } return last - first + 1; } public Map<SearchEverywhereContributor<?>, Collection<SearchEverywhereFoundElementInfo>> getFoundElementsMap() { return listElements.stream() .filter(info -> info.element != MORE_ELEMENT) .collect(Collectors.groupingBy(o -> o.getContributor(), Collectors.toCollection(ArrayList::new))); } @NotNull private List<SearchEverywhereContributor> contributors() { return 
Lists.transform(listElements, info -> info.getContributor()); } @NotNull private List<Object> values() { return Lists.transform(listElements, info -> info.getElement()); } private int getInsertionPoint(SearchEverywhereContributor contributor) { if (listElements.isEmpty()) { return 0; } List<SearchEverywhereContributor> list = contributors(); int index = list.lastIndexOf(contributor); if (index >= 0) { return isMoreElement(index) ? index : index + 1; } index = Collections.binarySearch(list, contributor, Comparator.comparingInt(SearchEverywhereContributor::getSortWeight)); return -index - 1; } } private class ShowInFindToolWindowAction extends DumbAwareAction { ShowInFindToolWindowAction() { super(IdeBundle.messagePointer("show.in.find.window.button.name"), IdeBundle.messagePointer("show.in.find.window.button.description"), AllIcons.General.Pin_tab); } @Override public void actionPerformed(@NotNull AnActionEvent e) { stopSearching(); Collection<SearchEverywhereContributor<?>> contributors = getContributorsForCurrentTab(); contributors = ContainerUtil.filter(contributors, SearchEverywhereContributor::showInFindResults); if (contributors.isEmpty()) { return; } String searchText = getSearchPattern(); String contributorsString = contributors.stream() .map(SearchEverywhereContributor::getGroupName) .collect(Collectors.joining(", ")); UsageViewPresentation presentation = new UsageViewPresentation(); String tabCaptionText = IdeBundle.message("searcheverywhere.found.matches.title", searchText, contributorsString); presentation.setCodeUsagesString(tabCaptionText); presentation.setUsagesInGeneratedCodeString( IdeBundle.message("searcheverywhere.found.matches.generated.code.title", searchText, contributorsString)); presentation.setTargetsNodeText(IdeBundle.message("searcheverywhere.found.targets.title", searchText, contributorsString)); presentation.setTabName(tabCaptionText); presentation.setTabText(tabCaptionText); Collection<Usage> usages = new LinkedHashSet<>(); 
Collection<PsiElement> targets = new LinkedHashSet<>(); Collection<Object> cached = contributors.stream() .flatMap(contributor -> myListModel.getFoundItems(contributor).stream()) .collect(Collectors.toSet()); fillUsages(cached, usages, targets); Collection<SearchEverywhereContributor<?>> contributorsForAdditionalSearch; contributorsForAdditionalSearch = ContainerUtil.filter(contributors, contributor -> myListModel.hasMoreElements(contributor)); closePopup(); if (!contributorsForAdditionalSearch.isEmpty()) { ProgressManager.getInstance().run(new Task.Modal(myProject, tabCaptionText, true) { private final ProgressIndicator progressIndicator = new ProgressIndicatorBase(); @Override public void run(@NotNull ProgressIndicator indicator) { progressIndicator.start(); TooManyUsagesStatus tooManyUsagesStatus = TooManyUsagesStatus.createFor(progressIndicator); Collection<Object> foundElements = new ArrayList<>(); int alreadyFoundCount = cached.size(); for (SearchEverywhereContributor<?> contributor : contributorsForAdditionalSearch) { if (progressIndicator.isCanceled()) break; try { fetch(contributor, foundElements, alreadyFoundCount, tooManyUsagesStatus); } catch (ProcessCanceledException ignore) { } } fillUsages(foundElements, usages, targets); } <Item> void fetch(SearchEverywhereContributor<Item> contributor, Collection<Object> foundElements, int alreadyFoundCount, TooManyUsagesStatus tooManyUsagesStatus) { contributor.fetchElements(searchText, progressIndicator, o -> { if (progressIndicator.isCanceled()) { return false; } if (cached.contains(o)) { return true; } foundElements.add(o); tooManyUsagesStatus.pauseProcessingIfTooManyUsages(); if (foundElements.size() + alreadyFoundCount >= UsageLimitUtil.USAGES_LIMIT && tooManyUsagesStatus.switchTooManyUsagesStatus()) { int usageCount = foundElements.size() + alreadyFoundCount; UsageViewManagerImpl.showTooManyUsagesWarningLater( getProject(), tooManyUsagesStatus, progressIndicator, presentation, usageCount, null); return 
!progressIndicator.isCanceled(); } return true; }); } @Override public void onCancel() { progressIndicator.cancel(); } @Override public void onSuccess() { showInFindWindow(targets, usages, presentation); } @Override public void onThrowable(@NotNull Throwable error) { progressIndicator.cancel(); } }); } else { showInFindWindow(targets, usages, presentation); } } private void fillUsages(Collection<Object> foundElements, Collection<? super Usage> usages, Collection<? super PsiElement> targets) { ReadAction.run(() -> foundElements.stream() .filter(o -> o instanceof PsiElement) .forEach(o -> { PsiElement element = (PsiElement)o; if (element.getTextRange() != null) { UsageInfo usageInfo = new UsageInfo(element); usages.add(new UsageInfo2UsageAdapter(usageInfo)); } else { targets.add(element); } })); } private void showInFindWindow(Collection<? extends PsiElement> targets, Collection<Usage> usages, UsageViewPresentation presentation) { UsageTarget[] targetsArray = targets.isEmpty() ? UsageTarget.EMPTY_ARRAY : PsiElement2UsageTargetAdapter.convert(PsiUtilCore.toPsiElementArray(targets)); Usage[] usagesArray = usages.toArray(Usage.EMPTY_ARRAY); UsageViewManager.getInstance(myProject).showUsages(targetsArray, usagesArray, presentation); } @Override public void update(@NotNull AnActionEvent e) { SearchEverywhereContributor<?> contributor = mySelectedTab == null ? 
null : mySelectedTab.contributor; e.getPresentation().setEnabled(contributor == null || contributor.showInFindResults()); e.getPresentation().setIcon(ToolWindowManager.getInstance(myProject).getLocationIcon(ToolWindowId.FIND, AllIcons.General.Pin_tab)); } } static class FiltersAction extends ShowFilterAction { final PersistentSearchEverywhereContributorFilter<?> filter; final Runnable rebuildRunnable; FiltersAction(@NotNull PersistentSearchEverywhereContributorFilter<?> filter, @NotNull Runnable rebuildRunnable) { this.filter = filter; this.rebuildRunnable = rebuildRunnable; } @Override public boolean isEnabled() { return true; } @Override protected boolean isActive() { return filter.getAllElements().size() != filter.getSelectedElements().size(); } @Override protected ElementsChooser<?> createChooser() { return createChooser(filter, rebuildRunnable); } private static <T> ElementsChooser<T> createChooser(@NotNull PersistentSearchEverywhereContributorFilter<T> filter, @NotNull Runnable rebuildRunnable) { ElementsChooser<T> res = new ElementsChooser<T>(filter.getAllElements(), false) { @Override protected String getItemText(@NotNull T value) { return filter.getElementText(value); } @Nullable @Override protected Icon getItemIcon(@NotNull T value) { return filter.getElementIcon(value); } }; res.markElements(filter.getSelectedElements()); ElementsChooser.ElementsMarkListener<T> listener = (element, isMarked) -> { filter.setSelected(element, isMarked); rebuildRunnable.run(); }; res.addElementsMarkListener(listener); return res; } } private class CompleteCommandAction extends DumbAwareAction { @Override public void actionPerformed(@NotNull AnActionEvent e) { if (completeCommand()) { FeatureUsageData data = SearchEverywhereUsageTriggerCollector .createData(null) .addInputEvent(e); featureTriggered(SearchEverywhereUsageTriggerCollector.COMMAND_COMPLETED, data); } } @Override public void update(@NotNull AnActionEvent e) { 
e.getPresentation().setEnabled(getCompleteCommand().isPresent()); } private boolean completeCommand() { Optional<SearchEverywhereCommandInfo> suggestedCommand = getCompleteCommand(); if (suggestedCommand.isPresent()) { mySearchField.setText(suggestedCommand.get().getCommandWithPrefix() + " "); return true; } return false; } private Optional<SearchEverywhereCommandInfo> getCompleteCommand() { String pattern = getSearchPattern(); String commandPrefix = SearchTopHitProvider.getTopHitAccelerator(); if (pattern.startsWith(commandPrefix) && !pattern.contains(" ")) { String typedCommand = pattern.substring(commandPrefix.length()); SearchEverywhereCommandInfo command = getSelectedCommand(typedCommand).orElseGet(() -> { List<SearchEverywhereCommandInfo> completions = getCommandsForCompletion(getContributorsForCurrentTab(), typedCommand); return completions.isEmpty() ? null : completions.get(0); }); return Optional.ofNullable(command); } return Optional.empty(); } } private String getNotFoundText() { return mySelectedTab.getContributor() .map(c -> IdeBundle.message("searcheverywhere.nothing.found.for.contributor.anywhere", c.getFullGroupName().toLowerCase(Locale.ROOT))) .orElse(IdeBundle.message("searcheverywhere.nothing.found.for.all.anywhere")); } private void featureTriggered(@NotNull String featureID, @Nullable FeatureUsageData data) { if (data != null) { SearchEverywhereUsageTriggerCollector.trigger(myProject, featureID, data); } else { SearchEverywhereUsageTriggerCollector.trigger(myProject, featureID); } } private final SearchListener mySearchListener = new SearchListener(); private class SearchListener implements SESearcher.Listener { private Consumer<List<Object>> testCallback; @Override public void elementsAdded(@NotNull List<? 
extends SearchEverywhereFoundElementInfo> list) { boolean wasEmpty = myListModel.listElements.isEmpty(); mySelectionTracker.lock(); myListModel.addElements(list); mySelectionTracker.unlock(); mySelectionTracker.restoreSelection(); if (wasEmpty && !myListModel.listElements.isEmpty()) { Object prevSelection = ((SearchEverywhereManagerImpl)SearchEverywhereManager.getInstance(myProject)) .getPrevSelection(getSelectedContributorID()); if (prevSelection instanceof Integer) { for (SearchEverywhereFoundElementInfo info : myListModel.listElements) { if (Objects.hashCode(info.element) == ((Integer)prevSelection).intValue()) { myResultsList.setSelectedValue(info.element, true); break; } } } } } @Override public void elementsRemoved(@NotNull List<? extends SearchEverywhereFoundElementInfo> list) { list.forEach(info -> myListModel.removeElement(info.getElement(), info.getContributor())); } @Override public void searchFinished(@NotNull Map<SearchEverywhereContributor<?>, Boolean> hasMoreContributors) { if (myResultsList.isEmpty() || myListModel.isResultsExpired()) { if (myEverywhereAutoSet && !isEverywhere() && canToggleEverywhere() && !getSearchPattern().isEmpty()) { setEverywhereAuto(true); myNotFoundString = getSearchPattern(); return; } hideHint(); if (myListModel.isResultsExpired()) { myListModel.clear(); } } myResultsList.setEmptyText(getSearchPattern().isEmpty() ? 
"" : getNotFoundText()); hasMoreContributors.forEach(myListModel::setHasMore); mySelectionTracker.resetSelectionIfNeeded(); if (testCallback != null) testCallback.consume(myListModel.getItems()); } @TestOnly void setTestCallback(@Nullable Consumer<List<Object>> callback) { testCallback = callback; } } private final SearchEverywhereContributor<Object> myStubCommandContributor = new SearchEverywhereContributor<Object>() { @NotNull @Override public String getSearchProviderId() { return "CommandsContributor"; } @NotNull @Override public String getGroupName() { return IdeBundle.message("searcheverywhere.commands.tab.name"); } @Override public int getSortWeight() { return 10; } @Override public boolean showInFindResults() { return false; } @Override public void fetchElements(@NotNull String pattern, @NotNull ProgressIndicator progressIndicator, @NotNull Processor<? super Object> consumer) {} @Override public boolean processSelectedItem(@NotNull Object selected, int modifiers, @NotNull String searchText) { mySearchField.setText(((SearchEverywhereCommandInfo)selected).getCommandWithPrefix() + " "); featureTriggered(SearchEverywhereUsageTriggerCollector.COMMAND_COMPLETED, null); return false; } @NotNull @Override public ListCellRenderer<? super Object> getElementsRenderer() { return myCommandRenderer; } @Nullable @Override public Object getDataForItem(@NotNull Object element, @NotNull String dataId) { return null; } }; private final ExtendableTextField.Extension hintExtension = new ExtendableTextField.Extension() { private final TextIcon icon; { String message = IdeBundle.message("searcheverywhere.textfield.hint", SearchTopHitProvider.getTopHitAccelerator()); Color color = JBUI.CurrentTheme.BigPopup.searchFieldGrayForeground(); icon = new TextIcon(message, color, null, 0); icon.setFont(RelativeFont.SMALL.derive(getFont())); } @Override public Icon getIcon(boolean hovered) { return icon; } }; }
IDEA-231276 Hint in Search Everywhere is cut on macOS Catalina GitOrigin-RevId: be16b9ac249a75ad020a9302afb2e9bfb5ec61f6
platform/lang-impl/src/com/intellij/ide/actions/searcheverywhere/SearchEverywhereUI.java
IDEA-231276 Hint in Search Everywhere is cut on macOS Catalina
Java
apache-2.0
335f9d09a70fb9f26eef322cc7c9b25bc9e0edb3
0
polopoly/typica
// // typica - A client library for Amazon Web Services // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package com.xerox.amazonws.sdb; import java.util.ArrayList; import java.util.List; /** * This class implements a thread-safe, persistent counter, backed by SimpleDB. * * @author D. Kavanagh */ public class Counter { private Domain domain; private String name; /** * Constructs a counter, where a value may already be stored. If no value is * assigned, the counter is initialized to zero (the first counter value will be 1). 
* * @param domain the domain to use for the counter * @param counterName the name of this counter, must be unique within this domain * @throws SDBException wraps checked exceptions */ public Counter(Domain domain, String counterName) throws SDBException { this.domain = domain; this.name = counterName; // check for value, if none, default to 0 Item i = domain.getItem(name); List<ItemAttribute> attrs = i.getAttributes(); if (attrs == null) { attrs = new ArrayList<ItemAttribute>(); } ItemAttribute attr = null; if (attrs.size() > 0) { attr = attrs.get(0); } if (attr != null) { String val = attr.getValue(); if (val != null) { // good, there's a value return; } } else { attrs.add(new ItemAttribute("Value", "0", true)); } i.putAttributes(attrs); } /** * Constructs a counter with a specified initial value * * @param domain the domain to use for the counter * @param counterName the name of this counter, must be unique within this domain * @param initValue the initial value for the counter * @throws SDBException wraps checked exceptions */ public Counter(Domain domain, String counterName, long initValue) throws SDBException { this(domain, counterName); // initialize counter Item i = domain.getItem(name); ItemAttribute attr = new ItemAttribute("Value", ""+initValue, true); ArrayList<ItemAttribute> attrs = new ArrayList<ItemAttribute>(); attrs.add(attr); i.putAttributes(attrs); } /** * This method returns the counter name * * @return the name of the counter */ public String getName() { return name; } /** * This method returns the next value, period. 
* * @return the next counter value */ public long nextValue() throws SDBException { Item i = domain.getItem(name); List<ItemAttribute> attrs = i.getAttributes(); ItemAttribute attr = attrs.get(0); String val = attr.getValue(); long value = Long.parseLong(val); boolean done = false; while (!done) { try { attr = new ItemAttribute("Value", ""+(value+1), true); attrs.clear(); attrs.add(attr); ArrayList<Condition> conds = new ArrayList<Condition>(); conds.add(new Condition("Value", ""+value)); i.putAttributes(attrs, conds); done = true; } catch (SDBException ex) { String msg = ex.getErrors().get(0).getCode(); if (msg.equals("ConditionalCheckFailed")) { // increment, pause and try again value++; try { Thread.sleep(500); } catch (InterruptedException ie) {} } else { throw ex; } } } return value+1; } }
java/com/xerox/amazonws/sdb/Counter.java
// // typica - A client library for Amazon Web Services // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package com.xerox.amazonws.sdb; import java.util.ArrayList; import java.util.List; /** * This class implements a thread-safe, persistent counter, backed by SimpleDB. * * @author D. Kavanagh */ public class Counter { private Domain domain; private String name; /** * Constructs a counter, where a value may already be stored. If no value is * assigned, the counter is initialized to zero (the first counter value will be 1). 
* * @param domain the domain to use for the counter * @param counterName the name of this counter, must be unique within this domain * @throws SDBException wraps checked exceptions */ public Counter(Domain domain, String counterName) throws SDBException { this.domain = domain; this.name = counterName; // check for value, if none, default to 0 Item i = domain.getItem(name); List<ItemAttribute> attrs = i.getAttributes(); if (attrs == null) { attrs = new ArrayList<ItemAttribute>(); } ItemAttribute attr = null; if (attrs.size() > 0) { attr = attrs.get(0); } if (attr != null) { String val = attr.getValue(); if (val != null) { // good, there's a value return; } } else { attrs.add(new ItemAttribute("Value", "0", true)); } i.putAttributes(attrs); } /** * Constructs a counter with a specified initial value * * @param domain the domain to use for the counter * @param counterName the name of this counter, must be unique within this domain * @param initValue the initial value for the counter * @throws SDBException wraps checked exceptions */ public Counter(Domain domain, String counterName, int initValue) throws SDBException { this(domain, counterName); // initialize counter Item i = domain.getItem(name); ItemAttribute attr = new ItemAttribute("Value", ""+initValue, true); ArrayList<ItemAttribute> attrs = new ArrayList<ItemAttribute>(); attrs.add(attr); i.putAttributes(attrs); } /** * This method returns the counter name * * @return the name of the counter */ public String getName() { return name; } /** * This method returns the next value, period. 
* * @return the next counter value */ public int nextValue() throws SDBException { Item i = domain.getItem(name); List<ItemAttribute> attrs = i.getAttributes(); ItemAttribute attr = attrs.get(0); String val = attr.getValue(); int value = Integer.parseInt(val); boolean done = false; while (!done) { try { attr = new ItemAttribute("Value", ""+(value+1), true); attrs.clear(); attrs.add(attr); ArrayList<Condition> conds = new ArrayList<Condition>(); conds.add(new Condition("Value", ""+value)); i.putAttributes(attrs, conds); done = true; } catch (SDBException ex) { String msg = ex.getErrors().get(0).getCode(); if (msg.equals("ConditionalCheckFailed")) { // increment, pause and try again value++; try { Thread.sleep(500); } catch (InterruptedException ie) {} } else { throw ex; } } } return value+1; } }
converted counter to use long git-svn-id: ed72533e8e76b4727d8751eef669f13bfa24ccfa@311 7ed6c317-272d-0410-8ab0-49a3fbf2961c
java/com/xerox/amazonws/sdb/Counter.java
converted counter to use long
Java
apache-2.0
fb7a1dc8a26399703cf52f8f1dc86d68abe17a71
0
Nickname0806/Test_Q4,Nickname0806/Test_Q4,apache/tomcat,apache/tomcat,apache/tomcat,Nickname0806/Test_Q4,Nickname0806/Test_Q4,apache/tomcat,apache/tomcat
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.startup; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.management.ObjectName; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Host; import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleEvent; import org.apache.catalina.LifecycleListener; import org.apache.catalina.core.StandardHost; import org.apache.catalina.util.ContextName; import org.apache.catalina.util.IOTools; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import 
org.apache.tomcat.util.ExceptionUtils; import org.apache.tomcat.util.digester.Digester; import org.apache.tomcat.util.modeler.Registry; import org.apache.tomcat.util.res.StringManager; /** * Startup event listener for a <b>Host</b> that configures the properties * of that Host, and the associated defined contexts. * * @author Craig R. McClanahan * @author Remy Maucherat * @version $Id$ */ public class HostConfig implements LifecycleListener { private static final Log log = LogFactory.getLog( HostConfig.class ); // ----------------------------------------------------- Instance Variables /** * The Java class name of the Context configuration class we should use. */ protected String configClass = "org.apache.catalina.startup.ContextConfig"; /** * The Java class name of the Context implementation we should use. */ protected String contextClass = "org.apache.catalina.core.StandardContext"; /** * The Host we are associated with. */ protected Host host = null; /** * The JMX ObjectName of this component. */ protected ObjectName oname = null; /** * The string resources for this package. */ protected static final StringManager sm = StringManager.getManager(Constants.Package); /** * Should we deploy XML Context config files packaged with WAR files and * directories? */ protected boolean deployXML = false; /** * Should XML files be copied to * $CATALINA_BASE/conf/&lt;engine&gt;/&lt;host&gt; by default when * a web application is deployed? */ protected boolean copyXML = false; /** * Should we unpack WAR files when auto-deploying applications in the * <code>appBase</code> directory? */ protected boolean unpackWARs = false; /** * Map of deployed applications. */ protected final Map<String, DeployedApplication> deployed = new ConcurrentHashMap<>(); /** * List of applications which are being serviced, and shouldn't be * deployed/undeployed/redeployed at the moment. 
*/ protected final ArrayList<String> serviced = new ArrayList<>(); /** * The <code>Digester</code> instance used to parse context descriptors. */ protected static final Digester digester = createDigester(); /** * The list of Wars in the appBase to be ignored because they are invalid * (e.g. contain /../ sequences). */ protected final Set<String> invalidWars = new HashSet<>(); // ------------------------------------------------------------- Properties /** * Return the Context configuration class name. */ public String getConfigClass() { return (this.configClass); } /** * Set the Context configuration class name. * * @param configClass The new Context configuration class name. */ public void setConfigClass(String configClass) { this.configClass = configClass; } /** * Return the Context implementation class name. */ public String getContextClass() { return (this.contextClass); } /** * Set the Context implementation class name. * * @param contextClass The new Context implementation class name. */ public void setContextClass(String contextClass) { this.contextClass = contextClass; } /** * Return the deploy XML config file flag for this component. */ public boolean isDeployXML() { return (this.deployXML); } /** * Set the deploy XML config file flag for this component. * * @param deployXML The new deploy XML flag */ public void setDeployXML(boolean deployXML) { this.deployXML= deployXML; } /** * Return the copy XML config file flag for this component. */ public boolean isCopyXML() { return (this.copyXML); } /** * Set the copy XML config file flag for this component. * * @param copyXML The new copy XML flag */ public void setCopyXML(boolean copyXML) { this.copyXML= copyXML; } /** * Return the unpack WARs flag. */ public boolean isUnpackWARs() { return (this.unpackWARs); } /** * Set the unpack WARs flag. 
* * @param unpackWARs The new unpack WARs flag */ public void setUnpackWARs(boolean unpackWARs) { this.unpackWARs = unpackWARs; } // --------------------------------------------------------- Public Methods /** * Process the START event for an associated Host. * * @param event The lifecycle event that has occurred */ @Override public void lifecycleEvent(LifecycleEvent event) { if (event.getType().equals(Lifecycle.PERIODIC_EVENT)) check(); // Identify the host we are associated with try { host = (Host) event.getLifecycle(); if (host instanceof StandardHost) { setCopyXML(((StandardHost) host).isCopyXML()); setDeployXML(((StandardHost) host).isDeployXML()); setUnpackWARs(((StandardHost) host).isUnpackWARs()); } } catch (ClassCastException e) { log.error(sm.getString("hostConfig.cce", event.getLifecycle()), e); return; } // Process the event that has occurred if (event.getType().equals(Lifecycle.START_EVENT)) start(); else if (event.getType().equals(Lifecycle.STOP_EVENT)) stop(); } /** * Add a serviced application to the list. */ public synchronized void addServiced(String name) { serviced.add(name); } /** * Is application serviced ? * @return state of the application */ public synchronized boolean isServiced(String name) { return (serviced.contains(name)); } /** * Removed a serviced application from the list. */ public synchronized void removeServiced(String name) { serviced.remove(name); } /** * Get the instant where an application was deployed. * @return 0L if no application with that name is deployed, or the instant * on which the application was deployed */ public long getDeploymentTime(String name) { DeployedApplication app = deployed.get(name); if (app == null) { return 0L; } return app.timestamp; } /** * Has the specified application been deployed? Note applications defined * in server.xml will not have been deployed. 
* @return <code>true</code> if the application has been deployed and * <code>false</code> if the application has not been deployed or does not * exist */ public boolean isDeployed(String name) { DeployedApplication app = deployed.get(name); if (app == null) { return false; } return true; } // ------------------------------------------------------ Protected Methods /** * Create the digester which will be used to parse context config files. */ protected static Digester createDigester() { Digester digester = new Digester(); digester.setValidating(false); // Add object creation rule digester.addObjectCreate("Context", "org.apache.catalina.core.StandardContext", "className"); // Set the properties on that object (it doesn't matter if extra // properties are set) digester.addSetProperties("Context"); return (digester); } protected File returnCanonicalPath(String path) { File file = new File(path); if (!file.isAbsolute()) file = new File(host.getCatalinaBase(), path); try { return file.getCanonicalFile(); } catch (IOException e) { return file; } } /** * Get the name of the configBase. * For use with JMX management. */ public String getConfigBaseName() { return host.getConfigBaseFile().getAbsolutePath(); } /** * Deploy applications for any directories or WAR files that are found * in our "application root" directory. */ protected void deployApps() { File appBase = host.getAppBaseFile(); File configBase = host.getConfigBaseFile(); String[] filteredAppPaths = filterAppPaths(appBase.list()); // Deploy XML descriptors from configBase deployDescriptors(configBase, configBase.list()); // Deploy WARs deployWARs(appBase, filteredAppPaths); // Deploy expanded folders deployDirectories(appBase, filteredAppPaths); } /** * Filter the list of application file paths to remove those that match * the regular expression defined by {@link Host#getDeployIgnore()}. 
* * @param unfilteredAppPaths The list of application paths to filtert * * @return The filtered list of application paths */ protected String[] filterAppPaths(String[] unfilteredAppPaths) { Pattern filter = host.getDeployIgnorePattern(); if (filter == null) { return unfilteredAppPaths; } List<String> filteredList = new ArrayList<>(); Matcher matcher = null; for (String appPath : unfilteredAppPaths) { if (matcher == null) { matcher = filter.matcher(appPath); } else { matcher.reset(appPath); } if (matcher.matches()) { if (log.isDebugEnabled()) { log.debug(sm.getString("hostConfig.ignorePath", appPath)); } } else { filteredList.add(appPath); } } return filteredList.toArray(new String[filteredList.size()]); } /** * Deploy applications for any directories or WAR files that are found * in our "application root" directory. */ protected void deployApps(String name) { File appBase = host.getAppBaseFile(); File configBase = host.getConfigBaseFile(); ContextName cn = new ContextName(name); String baseName = cn.getBaseName(); if (deploymentExists(baseName)) { return; } // Deploy XML descriptor from configBase File xml = new File(configBase, baseName + ".xml"); if (xml.exists()) { deployDescriptor(cn, xml); return; } // Deploy WAR File war = new File(appBase, baseName + ".war"); if (war.exists()) { deployWAR(cn, war); return; } // Deploy expanded folder File dir = new File(appBase, baseName); if (dir.exists()) deployDirectory(cn, dir); } /** * Deploy XML context descriptors. 
*/ protected void deployDescriptors(File configBase, String[] files) { if (files == null) return; ExecutorService es = host.getStartStopExecutor(); List<Future<?>> results = new ArrayList<>(); for (int i = 0; i < files.length; i++) { File contextXml = new File(configBase, files[i]); if (files[i].toLowerCase(Locale.ENGLISH).endsWith(".xml")) { ContextName cn = new ContextName(files[i]); if (isServiced(cn.getName()) || deploymentExists(cn.getName())) continue; results.add( es.submit(new DeployDescriptor(this, cn, contextXml))); } } for (Future<?> result : results) { try { result.get(); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.threaded.error"), e); } } } /** * @param cn * @param contextXml */ @SuppressWarnings("null") // context is not null protected void deployDescriptor(ContextName cn, File contextXml) { DeployedApplication deployedApp = new DeployedApplication(cn.getName()); // Assume this is a configuration descriptor and deploy it if(log.isInfoEnabled()) { log.info(sm.getString("hostConfig.deployDescriptor", contextXml.getAbsolutePath())); } Context context = null; boolean isExternalWar = false; boolean isExternal = false; File expandedDocBase = null; try { synchronized (digester) { try { context = (Context) digester.parse(contextXml); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", contextXml.getAbsolutePath())); } finally { if (context == null) { context = new FailedContext(); } digester.reset(); } } Class<?> clazz = Class.forName(host.getConfigClass()); LifecycleListener listener = (LifecycleListener) clazz.newInstance(); context.addLifecycleListener(listener); context.setConfigFile(contextXml.toURI().toURL()); context.setName(cn.getName()); context.setPath(cn.getPath()); context.setWebappVersion(cn.getVersion()); // Add the associated docBase to the redeployed list if it's a WAR if (context.getDocBase() != null) { File docBase = new File(context.getDocBase()); if (!docBase.isAbsolute()) 
{ docBase = new File(host.getAppBaseFile(), context.getDocBase()); } // If external docBase, register .xml as redeploy first if (!docBase.getCanonicalPath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) { isExternal = true; deployedApp.redeployResources.put( contextXml.getAbsolutePath(), Long.valueOf(contextXml.lastModified())); deployedApp.redeployResources.put(docBase.getAbsolutePath(), Long.valueOf(docBase.lastModified())); if (docBase.getAbsolutePath().toLowerCase(Locale.ENGLISH).endsWith(".war")) { isExternalWar = true; } } else { log.warn(sm.getString("hostConfig.deployDescriptor.localDocBaseSpecified", docBase)); // Ignore specified docBase context.setDocBase(null); } } host.addChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.error(sm.getString("hostConfig.deployDescriptor.error", contextXml.getAbsolutePath()), t); } finally { // Get paths for WAR and expanded WAR in appBase // default to appBase dir + name expandedDocBase = new File(host.getAppBaseFile(), cn.getBaseName()); if (context.getDocBase() != null) { // first assume docBase is absolute expandedDocBase = new File(context.getDocBase()); if (!expandedDocBase.isAbsolute()) { // if docBase specified and relative, it must be relative to appBase expandedDocBase = new File(host.getAppBaseFile(), context.getDocBase()); } } // Add the eventual unpacked WAR and all the resources which will be // watched inside it if (isExternalWar && unpackWARs) { deployedApp.redeployResources.put(expandedDocBase.getAbsolutePath(), Long.valueOf(expandedDocBase.lastModified())); deployedApp.redeployResources.put(contextXml.getAbsolutePath(), Long.valueOf(contextXml.lastModified())); addWatchedResources(deployedApp, expandedDocBase.getAbsolutePath(), context); } else { // Find an existing matching war and expanded folder if (!isExternal) { File warDocBase = new File(expandedDocBase.getAbsolutePath() + ".war"); if (warDocBase.exists()) { 
deployedApp.redeployResources.put(warDocBase.getAbsolutePath(), Long.valueOf(warDocBase.lastModified())); } } if (expandedDocBase.exists()) { deployedApp.redeployResources.put(expandedDocBase.getAbsolutePath(), Long.valueOf(expandedDocBase.lastModified())); addWatchedResources(deployedApp, expandedDocBase.getAbsolutePath(), context); } else { addWatchedResources(deployedApp, null, context); } // Add the context XML to the list of files which should trigger a redeployment if (!isExternal) { deployedApp.redeployResources.put( contextXml.getAbsolutePath(), Long.valueOf(contextXml.lastModified())); } } // Add the global redeploy resources (which are never deleted) at // the end so they don't interfere with the deletion process addGlobalRedeployResources(deployedApp); } if (host.findChild(context.getName()) != null) { deployed.put(context.getName(), deployedApp); } } /** * Deploy WAR files. */ protected void deployWARs(File appBase, String[] files) { if (files == null) return; ExecutorService es = host.getStartStopExecutor(); List<Future<?>> results = new ArrayList<>(); for (int i = 0; i < files.length; i++) { if (files[i].equalsIgnoreCase("META-INF")) continue; if (files[i].equalsIgnoreCase("WEB-INF")) continue; File war = new File(appBase, files[i]); if (files[i].toLowerCase(Locale.ENGLISH).endsWith(".war") && war.isFile() && !invalidWars.contains(files[i]) ) { ContextName cn = new ContextName(files[i]); // Check for WARs with /../ /./ or similar sequences in the name if (!validateContextPath(appBase, cn.getBaseName())) { log.error(sm.getString( "hostConfig.illegalWarName", files[i])); invalidWars.add(files[i]); continue; } if (isServiced(cn.getName()) || deploymentExists(cn.getName())) continue; results.add(es.submit(new DeployWar(this, cn, war))); } } for (Future<?> result : results) { try { result.get(); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployWar.threaded.error"), e); } } } private boolean validateContextPath(File appBase, String 
contextPath) { // More complicated than the ideal as the canonical path may or may // not end with File.separator for a directory StringBuilder docBase; String canonicalDocBase = null; try { String canonicalAppBase = appBase.getCanonicalPath(); docBase = new StringBuilder(canonicalAppBase); if (canonicalAppBase.endsWith(File.separator)) { docBase.append(contextPath.substring(1).replace( '/', File.separatorChar)); } else { docBase.append(contextPath.replace('/', File.separatorChar)); } // At this point docBase should be canonical but will not end // with File.separator canonicalDocBase = (new File(docBase.toString())).getCanonicalPath(); // If the canonicalDocBase ends with File.separator, add one to // docBase before they are compared if (canonicalDocBase.endsWith(File.separator)) { docBase.append(File.separator); } } catch (IOException ioe) { return false; } // Compare the two. If they are not the same, the contextPath must // have /../ like sequences in it return canonicalDocBase.equals(docBase.toString()); } /** * @param cn * @param war */ protected void deployWAR(ContextName cn, File war) { // Checking for a nested /META-INF/context.xml JarFile jar = null; JarEntry entry = null; InputStream istream = null; FileOutputStream fos = null; BufferedOutputStream ostream = null; File xml; if (copyXML) { xml = new File(host.getConfigBaseFile(), cn.getBaseName() + ".xml"); } else { xml = new File(host.getAppBaseFile(), cn.getBaseName() + "/META-INF/context.xml"); } boolean xmlInWar = false; if (deployXML && !xml.exists()) { try { jar = new JarFile(war); entry = jar.getJarEntry(Constants.ApplicationContextXml); if (entry != null) { xmlInWar = true; } if ((copyXML || unpackWARs) && xmlInWar) { istream = jar.getInputStream(entry); fos = new FileOutputStream(xml); ostream = new BufferedOutputStream(fos, 1024); byte buffer[] = new byte[1024]; while (true) { int n = istream.read(buffer); if (n < 0) { break; } ostream.write(buffer, 0, n); } ostream.flush(); } } catch 
(IOException e) { /* Ignore */ } finally { if (ostream != null) { try { ostream.close(); } catch (IOException ioe) { // Ignore } ostream = null; } if (fos != null) { try { fos.close(); } catch (IOException ioe) { // Ignore } fos = null; } if (istream != null) { try { istream.close(); } catch (IOException ioe) { // Ignore } istream = null; } entry = null; if (jar != null) { try { jar.close(); } catch (IOException ioe) { // Ignore; } jar = null; } } } DeployedApplication deployedApp = new DeployedApplication(cn.getName()); // Deploy the application in this WAR file if(log.isInfoEnabled()) log.info(sm.getString("hostConfig.deployWar", war.getAbsolutePath())); Context context = null; try { if (deployXML && xml.exists()) { synchronized (digester) { try { context = (Context) digester.parse(xml); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", war.getAbsolutePath())); } finally { if (context == null) { context = new FailedContext(); } digester.reset(); } } context.setConfigFile(xml.toURI().toURL()); } else if (deployXML && xmlInWar) { synchronized (digester) { try { jar = new JarFile(war); entry = jar.getJarEntry(Constants.ApplicationContextXml); istream = jar.getInputStream(entry); context = (Context) digester.parse(istream); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", war.getAbsolutePath())); } finally { if (context == null) { context = new FailedContext(); } context.setConfigFile(new URL("jar:" + war.toURI().toString() + "!/" + Constants.ApplicationContextXml)); if (istream != null) { try { istream.close(); } catch (IOException e) { /* Ignore */ } istream = null; } entry = null; if (jar != null) { try { jar.close(); } catch (IOException e) { /* Ignore */ } jar = null; } digester.reset(); } } } else { context = (Context) Class.forName(contextClass).newInstance(); } // Populate redeploy resources with the WAR file deployedApp.redeployResources.put (war.getAbsolutePath(), 
Long.valueOf(war.lastModified())); if (deployXML && xml.exists() && copyXML) { deployedApp.redeployResources.put(xml.getAbsolutePath(), Long.valueOf(xml.lastModified())); } Class<?> clazz = Class.forName(host.getConfigClass()); LifecycleListener listener = (LifecycleListener) clazz.newInstance(); context.addLifecycleListener(listener); context.setName(cn.getName()); context.setPath(cn.getPath()); context.setWebappVersion(cn.getVersion()); context.setDocBase(cn.getBaseName() + ".war"); host.addChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.error(sm.getString("hostConfig.deployWar.error", war.getAbsolutePath()), t); } finally { // If we're unpacking WARs, the docBase will be mutated after // starting the context if (unpackWARs && context != null && context.getDocBase() != null) { File docBase = new File(host.getAppBaseFile(), cn.getBaseName()); deployedApp.redeployResources.put(docBase.getAbsolutePath(), Long.valueOf(docBase.lastModified())); addWatchedResources(deployedApp, docBase.getAbsolutePath(), context); if (deployXML && !copyXML && (xmlInWar || xml.exists())) { deployedApp.redeployResources.put(xml.getAbsolutePath(), Long.valueOf(xml.lastModified())); } } else { addWatchedResources(deployedApp, null, context); } // Add the global redeploy resources (which are never deleted) at // the end so they don't interfere with the deletion process addGlobalRedeployResources(deployedApp); } deployed.put(cn.getName(), deployedApp); } /** * Deploy directories. 
*/ protected void deployDirectories(File appBase, String[] files) { if (files == null) return; ExecutorService es = host.getStartStopExecutor(); List<Future<?>> results = new ArrayList<>(); for (int i = 0; i < files.length; i++) { if (files[i].equalsIgnoreCase("META-INF")) continue; if (files[i].equalsIgnoreCase("WEB-INF")) continue; File dir = new File(appBase, files[i]); if (dir.isDirectory()) { ContextName cn = new ContextName(files[i]); if (isServiced(cn.getName()) || deploymentExists(cn.getName())) continue; results.add(es.submit(new DeployDirectory(this, cn, dir))); } } for (Future<?> result : results) { try { result.get(); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDir.threaded.error"), e); } } } /** * @param cn * @param dir */ protected void deployDirectory(ContextName cn, File dir) { DeployedApplication deployedApp = new DeployedApplication(cn.getName()); // Deploy the application in this directory if( log.isInfoEnabled() ) log.info(sm.getString("hostConfig.deployDir", dir.getAbsolutePath())); Context context = null; File xml = new File(dir, Constants.ApplicationContextXml); File xmlCopy = null; try { if (deployXML && xml.exists()) { synchronized (digester) { try { context = (Context) digester.parse(xml); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", xml)); } finally { if (context == null) { context = new FailedContext(); } digester.reset(); } } if (copyXML) { xmlCopy = new File(host.getConfigBaseFile(), cn.getBaseName() + ".xml"); InputStream is = null; OutputStream os = null; try { is = new FileInputStream(xml); os = new FileOutputStream(xmlCopy); IOTools.flow(is, os); // Don't catch IOE - let the outer try/catch handle it } finally { try { if (is != null) is.close(); } catch (IOException e){ // Ignore } try { if (os != null) os.close(); } catch (IOException e){ // Ignore } } context.setConfigFile(xmlCopy.toURI().toURL()); } else { context.setConfigFile(xml.toURI().toURL()); } } else { context 
= (Context) Class.forName(contextClass).newInstance(); } Class<?> clazz = Class.forName(host.getConfigClass()); LifecycleListener listener = (LifecycleListener) clazz.newInstance(); context.addLifecycleListener(listener); context.setName(cn.getName()); context.setPath(cn.getPath()); context.setWebappVersion(cn.getVersion()); context.setDocBase(cn.getBaseName()); host.addChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.error(sm.getString("hostConfig.deployDir.error", dir.getAbsolutePath()), t); } finally { deployedApp.redeployResources.put(dir.getAbsolutePath(), Long.valueOf(dir.lastModified())); if (deployXML && xml.exists()) { if (xmlCopy == null) { deployedApp.redeployResources.put( xml.getAbsolutePath(), Long.valueOf(xml.lastModified())); } else { deployedApp.redeployResources.put( xmlCopy.getAbsolutePath(), Long.valueOf(xmlCopy.lastModified())); } } addWatchedResources(deployedApp, dir.getAbsolutePath(), context); // Add the global redeploy resources (which are never deleted) at // the end so they don't interfere with the deletion process addGlobalRedeployResources(deployedApp); } deployed.put(cn.getName(), deployedApp); } /** * Check if a webapp is already deployed in this host. * * @param contextName of the context which will be checked */ protected boolean deploymentExists(String contextName) { return (deployed.containsKey(contextName) || (host.findChild(contextName) != null)); } /** * Add watched resources to the specified Context. * @param app HostConfig deployed app * @param docBase web app docBase * @param context web application context */ protected void addWatchedResources(DeployedApplication app, String docBase, Context context) { // FIXME: Feature idea. 
Add support for patterns (ex: WEB-INF/*, // WEB-INF/*.xml), where we would only check if at least one // resource is newer than app.timestamp File docBaseFile = null; if (docBase != null) { docBaseFile = new File(docBase); if (!docBaseFile.isAbsolute()) { docBaseFile = new File(host.getAppBaseFile(), docBase); } } String[] watchedResources = context.findWatchedResources(); for (int i = 0; i < watchedResources.length; i++) { File resource = new File(watchedResources[i]); if (!resource.isAbsolute()) { if (docBase != null) { resource = new File(docBaseFile, watchedResources[i]); } else { if(log.isDebugEnabled()) log.debug("Ignoring non-existent WatchedResource '" + resource.getAbsolutePath() + "'"); continue; } } if(log.isDebugEnabled()) log.debug("Watching WatchedResource '" + resource.getAbsolutePath() + "'"); app.reloadResources.put(resource.getAbsolutePath(), Long.valueOf(resource.lastModified())); } } protected void addGlobalRedeployResources(DeployedApplication app) { // Redeploy resources processing is hard-coded to never delete this file File hostContextXml = new File(getConfigBaseName(), Constants.HostContextXml); if (hostContextXml.isFile()) { app.redeployResources.put(hostContextXml.getAbsolutePath(), Long.valueOf(hostContextXml.lastModified())); } // Redeploy resources in CATALINA_BASE/conf are never deleted File globalContextXml = returnCanonicalPath(Constants.DefaultContextXml); if (globalContextXml.isFile()) { app.redeployResources.put(globalContextXml.getAbsolutePath(), Long.valueOf(globalContextXml.lastModified())); } } /** * Check resources for redeployment and reloading. 
*/ protected synchronized void checkResources(DeployedApplication app) { String[] resources = app.redeployResources.keySet().toArray(new String[0]); for (int i = 0; i < resources.length; i++) { File resource = new File(resources[i]); if (log.isDebugEnabled()) log.debug("Checking context[" + app.name + "] redeploy resource " + resource); if (resource.exists()) { long lastModified = app.redeployResources.get(resources[i]).longValue(); if ((!resource.isDirectory()) && resource.lastModified() > lastModified) { // Undeploy application if (log.isInfoEnabled()) log.info(sm.getString("hostConfig.undeploy", app.name)); Container context = host.findChild(app.name); try { host.removeChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.warn(sm.getString ("hostConfig.context.remove", app.name), t); } // Delete other redeploy resources for (int j = i + 1; j < resources.length; j++) { try { File current = new File(resources[j]); current = current.getCanonicalFile(); // Never delete per host context.xml defaults if (Constants.HostContextXml.equals( current.getName())) { continue; } // Only delete resources in the appBase or the // host's configBase if ((current.getAbsolutePath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) || (current.getAbsolutePath().startsWith( host.getConfigBaseFile().getAbsolutePath()))) { if (log.isDebugEnabled()) log.debug("Delete " + current); ExpandWar.delete(current); } } catch (IOException e) { log.warn(sm.getString ("hostConfig.canonicalizing", app.name), e); } } deployed.remove(app.name); return; } } else { // There is a chance the the resource was only missing // temporarily eg renamed during a text editor save try { Thread.sleep(500); } catch (InterruptedException e1) { // Ignore } // Recheck the resource to see if it was really deleted if (resource.exists()) { continue; } long lastModified = app.redeployResources.get(resources[i]).longValue(); if (lastModified == 0L) { continue; } // Undeploy 
application if (log.isInfoEnabled()) log.info(sm.getString("hostConfig.undeploy", app.name)); Container context = host.findChild(app.name); try { host.removeChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.warn(sm.getString ("hostConfig.context.remove", app.name), t); } // Delete all redeploy resources for (int j = i + 1; j < resources.length; j++) { try { File current = new File(resources[j]); current = current.getCanonicalFile(); // Never delete per host context.xml defaults if (Constants.HostContextXml.equals( current.getName())) { continue; } // Only delete resources in the appBase or the host's // configBase if ((current.getAbsolutePath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) || (current.getAbsolutePath().startsWith( host.getConfigBaseFile().getAbsolutePath()))) { if (log.isDebugEnabled()) log.debug("Delete " + current); ExpandWar.delete(current); } } catch (IOException e) { log.warn(sm.getString ("hostConfig.canonicalizing", app.name), e); } } // Delete reload resources as well (to remove any remaining .xml // descriptor) String[] resources2 = app.reloadResources.keySet().toArray(new String[0]); for (int j = 0; j < resources2.length; j++) { try { File current = new File(resources2[j]); current = current.getCanonicalFile(); // Never delete per host context.xml defaults if (Constants.HostContextXml.equals( current.getName())) { continue; } // Only delete resources in the appBase or the host's // configBase if ((current.getAbsolutePath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) || ((current.getAbsolutePath().startsWith( host.getConfigBaseFile().getAbsolutePath()) && (current.getAbsolutePath().endsWith(".xml"))))) { if (log.isDebugEnabled()) log.debug("Delete " + current); ExpandWar.delete(current); } } catch (IOException e) { log.warn(sm.getString ("hostConfig.canonicalizing", app.name), e); } } deployed.remove(app.name); return; } } resources = 
app.reloadResources.keySet().toArray(new String[0]); for (int i = 0; i < resources.length; i++) { File resource = new File(resources[i]); if (log.isDebugEnabled()) log.debug("Checking context[" + app.name + "] reload resource " + resource); long lastModified = app.reloadResources.get(resources[i]).longValue(); if ((!resource.exists() && lastModified != 0L) || (resource.lastModified() != lastModified)) { // Reload application if(log.isInfoEnabled()) log.info(sm.getString("hostConfig.reload", app.name)); Context context = (Context) host.findChild(app.name); if (context.getState().isAvailable()) { // Reload catches and logs exceptions context.reload(); } else { // If the context was not started (for example an error // in web.xml) we'll still get to try to start try { context.start(); } catch (Exception e) { log.warn(sm.getString ("hostConfig.context.restart", app.name), e); } } // Update times app.reloadResources.put(resources[i], Long.valueOf(resource.lastModified())); app.timestamp = System.currentTimeMillis(); return; } } } /** * Process a "start" event for this Host. 
     */
    public void start() {

        if (log.isDebugEnabled())
            log.debug(sm.getString("hostConfig.start"));

        // Register this deployer as an MBean (type=Deployer) in the host's
        // JMX domain so management tools can drive deployment.
        try {
            ObjectName hostON = host.getObjectName();
            oname = new ObjectName
                (hostON.getDomain() + ":type=Deployer,host=" + host.getName());
            Registry.getRegistry(null, null).registerComponent
                (this, oname, this.getClass().getName());
        } catch (Exception e) {
            log.error(sm.getString("hostConfig.jmx.register", oname), e);
        }

        // Optionally create the appBase and configBase directories.
        // mkdirs() returning false is only an error if the directory does
        // not already exist.
        if (host.getCreateDirs()) {
            File[] dirs = new File[] {host.getAppBaseFile(),host.getConfigBaseFile()};
            for (int i=0; i<dirs.length; i++) {
                if (!dirs[i].mkdirs() && !dirs[i].isDirectory()) {
                    log.error(sm.getString("hostConfig.createDirs",dirs[i]));
                }
            }
        }

        // Without a usable appBase no deployment is possible, so disable
        // both startup deployment and auto deployment for this host.
        if (!host.getAppBaseFile().isDirectory()) {
            log.error(sm.getString("hostConfig.appBase", host.getName(),
                    host.getAppBaseFile().getPath()));
            host.setDeployOnStartup(false);
            host.setAutoDeploy(false);
        }

        if (host.getDeployOnStartup())
            deployApps();

    }


    /**
     * Process a "stop" event for this Host: unregister this deployer's
     * MBean, if it was registered in start().
     */
    public void stop() {

        if (log.isDebugEnabled())
            log.debug(sm.getString("hostConfig.stop"));

        if (oname != null) {
            try {
                Registry.getRegistry(null, null).unregisterComponent(oname);
            } catch (Exception e) {
                log.error(sm.getString("hostConfig.jmx.unregister", oname), e);
            }
        }
        oname = null;
    }


    /**
     * Check status of all webapps. Called on PERIODIC_EVENT; no-op unless
     * autoDeploy is enabled on the host.
     */
    protected void check() {

        if (host.getAutoDeploy()) {
            // Check for resources modification to trigger redeployment.
            // Apps currently being serviced (see addServiced) are skipped.
            DeployedApplication[] apps =
                deployed.values().toArray(new DeployedApplication[0]);
            for (int i = 0; i < apps.length; i++) {
                if (!isServiced(apps[i].name))
                    checkResources(apps[i]);
            }
            // Hotdeploy applications
            deployApps();
        }
    }


    /**
     * Check status of a specific webapp, for use with stuff like management
     * webapps. Re-checks an already-deployed app's resources, or attempts a
     * fresh deployment if the name is unknown.
     */
    public void check(String name) {
        DeployedApplication app = deployed.get(name);
        if (app != null) {
            checkResources(app);
        } else {
            deployApps(name);
        }
    }

    /**
     * Add a new Context to be managed by us.
     * Entry point for the admin webapp, and other JMX Context controllers.
     */
    public void manageApp(Context context)  {

        String contextName = context.getName();

        // Already tracked by this deployer - nothing to do
        if (deployed.containsKey(contextName))
            return;

        DeployedApplication deployedApp =
            new DeployedApplication(contextName);

        // Add the associated docBase to the redeployed list if it's a WAR
        boolean isWar = false;
        if (context.getDocBase() != null) {
            File docBase = new File(context.getDocBase());
            if (!docBase.isAbsolute()) {
                docBase = new File(host.getAppBaseFile(),
                        context.getDocBase());
            }
            deployedApp.redeployResources.put(docBase.getAbsolutePath(),
                    Long.valueOf(docBase.lastModified()));
            if (docBase.getAbsolutePath().toLowerCase(Locale.ENGLISH).endsWith(".war")) {
                isWar = true;
            }
        }
        host.addChild(context);
        // Add the eventual unpacked WAR and all the resources which will be
        // watched inside it
        if (isWar && unpackWARs) {
            File docBase = new File(host.getAppBaseFile(), context.getBaseName());
            deployedApp.redeployResources.put(docBase.getAbsolutePath(),
                        Long.valueOf(docBase.lastModified()));
            addWatchedResources(deployedApp, docBase.getAbsolutePath(), context);
        } else {
            addWatchedResources(deployedApp, null, context);
        }
        deployed.put(contextName, deployedApp);
    }

    /**
     * Remove a webapp from our control.
     * Entry point for the admin webapp, and other JMX Context controllers.
     * Only applies to apps currently marked as serviced.
     */
    public void unmanageApp(String contextName) {
        if(isServiced(contextName)) {
            deployed.remove(contextName);
            host.removeChild(host.findChild(contextName));
        }
    }

    // ----------------------------------------------------- Instance Variables


    /**
     * This class represents the state of a deployed application, as well as
     * the monitored resources.
     */
    protected static class DeployedApplication {
        public DeployedApplication(String name) {
            this.name = name;
        }

        /**
         * Application context path. The assertion is that
         * (host.getChild(name) != null).
         */
        public final String name;

        /**
         * Any modification of the specified (static) resources will cause a
         * redeployment of the application.
If any of the specified resources is * removed, the application will be undeployed. Typically, this will * contain resources like the context.xml file, a compressed WAR path. * The value is the last modification time. */ public final LinkedHashMap<String, Long> redeployResources = new LinkedHashMap<>(); /** * Any modification of the specified (static) resources will cause a * reload of the application. This will typically contain resources * such as the web.xml of a webapp, but can be configured to contain * additional descriptors. * The value is the last modification time. */ public final HashMap<String, Long> reloadResources = new HashMap<>(); /** * Instant where the application was last put in service. */ public long timestamp = System.currentTimeMillis(); } private static class DeployDescriptor implements Runnable { private HostConfig config; private ContextName cn; private File descriptor; public DeployDescriptor(HostConfig config, ContextName cn, File descriptor) { this.config = config; this.cn = cn; this.descriptor= descriptor; } @Override public void run() { config.deployDescriptor(cn, descriptor); } } private static class DeployWar implements Runnable { private HostConfig config; private ContextName cn; private File war; public DeployWar(HostConfig config, ContextName cn, File war) { this.config = config; this.cn = cn; this.war = war; } @Override public void run() { config.deployWAR(cn, war); } } private static class DeployDirectory implements Runnable { private HostConfig config; private ContextName cn; private File dir; public DeployDirectory(HostConfig config, ContextName cn, File dir) { this.config = config; this.cn = cn; this.dir = dir; } @Override public void run() { config.deployDirectory(cn, dir); } } }
java/org/apache/catalina/startup/HostConfig.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.startup; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.management.ObjectName; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Host; import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleEvent; import org.apache.catalina.LifecycleListener; import org.apache.catalina.core.StandardHost; import org.apache.catalina.util.ContextName; import org.apache.catalina.util.IOTools; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import 
org.apache.tomcat.util.ExceptionUtils; import org.apache.tomcat.util.digester.Digester; import org.apache.tomcat.util.modeler.Registry; import org.apache.tomcat.util.res.StringManager; /** * Startup event listener for a <b>Host</b> that configures the properties * of that Host, and the associated defined contexts. * * @author Craig R. McClanahan * @author Remy Maucherat * @version $Id$ */ public class HostConfig implements LifecycleListener { private static final Log log = LogFactory.getLog( HostConfig.class ); // ----------------------------------------------------- Instance Variables /** * The Java class name of the Context configuration class we should use. */ protected String configClass = "org.apache.catalina.startup.ContextConfig"; /** * The Java class name of the Context implementation we should use. */ protected String contextClass = "org.apache.catalina.core.StandardContext"; /** * The Host we are associated with. */ protected Host host = null; /** * The JMX ObjectName of this component. */ protected ObjectName oname = null; /** * The string resources for this package. */ protected static final StringManager sm = StringManager.getManager(Constants.Package); /** * Should we deploy XML Context config files packaged with WAR files and * directories? */ protected boolean deployXML = false; /** * Should XML files be copied to * $CATALINA_BASE/conf/&lt;engine&gt;/&lt;host&gt; by default when * a web application is deployed? */ protected boolean copyXML = false; /** * Should we unpack WAR files when auto-deploying applications in the * <code>appBase</code> directory? */ protected boolean unpackWARs = false; /** * Map of deployed applications. */ protected final Map<String, DeployedApplication> deployed = new ConcurrentHashMap<>(); /** * List of applications which are being serviced, and shouldn't be * deployed/undeployed/redeployed at the moment. 
*/ protected final ArrayList<String> serviced = new ArrayList<>(); /** * The <code>Digester</code> instance used to parse context descriptors. */ protected static final Digester digester = createDigester(); /** * The list of Wars in the appBase to be ignored because they are invalid * (e.g. contain /../ sequences). */ protected final Set<String> invalidWars = new HashSet<>(); // ------------------------------------------------------------- Properties /** * Return the Context configuration class name. */ public String getConfigClass() { return (this.configClass); } /** * Set the Context configuration class name. * * @param configClass The new Context configuration class name. */ public void setConfigClass(String configClass) { this.configClass = configClass; } /** * Return the Context implementation class name. */ public String getContextClass() { return (this.contextClass); } /** * Set the Context implementation class name. * * @param contextClass The new Context implementation class name. */ public void setContextClass(String contextClass) { this.contextClass = contextClass; } /** * Return the deploy XML config file flag for this component. */ public boolean isDeployXML() { return (this.deployXML); } /** * Set the deploy XML config file flag for this component. * * @param deployXML The new deploy XML flag */ public void setDeployXML(boolean deployXML) { this.deployXML= deployXML; } /** * Return the copy XML config file flag for this component. */ public boolean isCopyXML() { return (this.copyXML); } /** * Set the copy XML config file flag for this component. * * @param copyXML The new copy XML flag */ public void setCopyXML(boolean copyXML) { this.copyXML= copyXML; } /** * Return the unpack WARs flag. */ public boolean isUnpackWARs() { return (this.unpackWARs); } /** * Set the unpack WARs flag. 
* * @param unpackWARs The new unpack WARs flag */ public void setUnpackWARs(boolean unpackWARs) { this.unpackWARs = unpackWARs; } // --------------------------------------------------------- Public Methods /** * Process the START event for an associated Host. * * @param event The lifecycle event that has occurred */ @Override public void lifecycleEvent(LifecycleEvent event) { if (event.getType().equals(Lifecycle.PERIODIC_EVENT)) check(); // Identify the host we are associated with try { host = (Host) event.getLifecycle(); if (host instanceof StandardHost) { setCopyXML(((StandardHost) host).isCopyXML()); setDeployXML(((StandardHost) host).isDeployXML()); setUnpackWARs(((StandardHost) host).isUnpackWARs()); } } catch (ClassCastException e) { log.error(sm.getString("hostConfig.cce", event.getLifecycle()), e); return; } // Process the event that has occurred if (event.getType().equals(Lifecycle.START_EVENT)) start(); else if (event.getType().equals(Lifecycle.STOP_EVENT)) stop(); } /** * Add a serviced application to the list. */ public synchronized void addServiced(String name) { serviced.add(name); } /** * Is application serviced ? * @return state of the application */ public synchronized boolean isServiced(String name) { return (serviced.contains(name)); } /** * Removed a serviced application from the list. */ public synchronized void removeServiced(String name) { serviced.remove(name); } /** * Get the instant where an application was deployed. * @return 0L if no application with that name is deployed, or the instant * on which the application was deployed */ public long getDeploymentTime(String name) { DeployedApplication app = deployed.get(name); if (app == null) { return 0L; } return app.timestamp; } /** * Has the specified application been deployed? Note applications defined * in server.xml will not have been deployed. 
* @return <code>true</code> if the application has been deployed and * <code>false</code> if the application has not been deployed or does not * exist */ public boolean isDeployed(String name) { DeployedApplication app = deployed.get(name); if (app == null) { return false; } return true; } // ------------------------------------------------------ Protected Methods /** * Create the digester which will be used to parse context config files. */ protected static Digester createDigester() { Digester digester = new Digester(); digester.setValidating(false); // Add object creation rule digester.addObjectCreate("Context", "org.apache.catalina.core.StandardContext", "className"); // Set the properties on that object (it doesn't matter if extra // properties are set) digester.addSetProperties("Context"); return (digester); } protected File returnCanonicalPath(String path) { File file = new File(path); if (!file.isAbsolute()) file = new File(host.getCatalinaBase(), path); try { return file.getCanonicalFile(); } catch (IOException e) { return file; } } /** * Get the name of the configBase. * For use with JMX management. */ public String getConfigBaseName() { return host.getConfigBaseFile().getAbsolutePath(); } /** * Deploy applications for any directories or WAR files that are found * in our "application root" directory. */ protected void deployApps() { File appBase = host.getAppBaseFile(); File configBase = host.getConfigBaseFile(); String[] filteredAppPaths = filterAppPaths(appBase.list()); // Deploy XML descriptors from configBase deployDescriptors(configBase, configBase.list()); // Deploy WARs deployWARs(appBase, filteredAppPaths); // Deploy expanded folders deployDirectories(appBase, filteredAppPaths); } /** * Filter the list of application file paths to remove those that match * the regular expression defined by {@link Host#getDeployIgnore()}. 
* * @param unfilteredAppPaths The list of application paths to filtert * * @return The filtered list of application paths */ protected String[] filterAppPaths(String[] unfilteredAppPaths) { Pattern filter = host.getDeployIgnorePattern(); if (filter == null) { return unfilteredAppPaths; } List<String> filteredList = new ArrayList<>(); Matcher matcher = null; for (String appPath : unfilteredAppPaths) { if (matcher == null) { matcher = filter.matcher(appPath); } else { matcher.reset(appPath); } if (matcher.matches()) { if (log.isDebugEnabled()) { log.debug(sm.getString("hostConfig.ignorePath", appPath)); } } else { filteredList.add(appPath); } } return filteredList.toArray(new String[filteredList.size()]); } /** * Deploy applications for any directories or WAR files that are found * in our "application root" directory. */ protected void deployApps(String name) { File appBase = host.getAppBaseFile(); File configBase = host.getConfigBaseFile(); ContextName cn = new ContextName(name); String baseName = cn.getBaseName(); if (deploymentExists(baseName)) { return; } // Deploy XML descriptor from configBase File xml = new File(configBase, baseName + ".xml"); if (xml.exists()) { deployDescriptor(cn, xml); return; } // Deploy WAR File war = new File(appBase, baseName + ".war"); if (war.exists()) { deployWAR(cn, war); return; } // Deploy expanded folder File dir = new File(appBase, baseName); if (dir.exists()) deployDirectory(cn, dir); } /** * Deploy XML context descriptors. 
*/ protected void deployDescriptors(File configBase, String[] files) { if (files == null) return; ExecutorService es = host.getStartStopExecutor(); List<Future<?>> results = new ArrayList<>(); for (int i = 0; i < files.length; i++) { File contextXml = new File(configBase, files[i]); if (files[i].toLowerCase(Locale.ENGLISH).endsWith(".xml")) { ContextName cn = new ContextName(files[i]); if (isServiced(cn.getName()) || deploymentExists(cn.getName())) continue; results.add( es.submit(new DeployDescriptor(this, cn, contextXml))); } } for (Future<?> result : results) { try { result.get(); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.threaded.error"), e); } } } /** * @param cn * @param contextXml */ protected void deployDescriptor(ContextName cn, File contextXml) { DeployedApplication deployedApp = new DeployedApplication(cn.getName()); // Assume this is a configuration descriptor and deploy it if(log.isInfoEnabled()) { log.info(sm.getString("hostConfig.deployDescriptor", contextXml.getAbsolutePath())); } Context context = null; boolean isExternalWar = false; boolean isExternal = false; File expandedDocBase = null; try { synchronized (digester) { try { context = (Context) digester.parse(contextXml); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", contextXml.getAbsolutePath())); } finally { if (context == null) { context = new FailedContext(); } digester.reset(); } } Class<?> clazz = Class.forName(host.getConfigClass()); LifecycleListener listener = (LifecycleListener) clazz.newInstance(); context.addLifecycleListener(listener); context.setConfigFile(contextXml.toURI().toURL()); context.setName(cn.getName()); context.setPath(cn.getPath()); context.setWebappVersion(cn.getVersion()); // Add the associated docBase to the redeployed list if it's a WAR if (context.getDocBase() != null) { File docBase = new File(context.getDocBase()); if (!docBase.isAbsolute()) { docBase = new File(host.getAppBaseFile(), 
context.getDocBase()); } // If external docBase, register .xml as redeploy first if (!docBase.getCanonicalPath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) { isExternal = true; deployedApp.redeployResources.put( contextXml.getAbsolutePath(), Long.valueOf(contextXml.lastModified())); deployedApp.redeployResources.put(docBase.getAbsolutePath(), Long.valueOf(docBase.lastModified())); if (docBase.getAbsolutePath().toLowerCase(Locale.ENGLISH).endsWith(".war")) { isExternalWar = true; } } else { log.warn(sm.getString("hostConfig.deployDescriptor.localDocBaseSpecified", docBase)); // Ignore specified docBase context.setDocBase(null); } } host.addChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.error(sm.getString("hostConfig.deployDescriptor.error", contextXml.getAbsolutePath()), t); } finally { // Get paths for WAR and expanded WAR in appBase // default to appBase dir + name expandedDocBase = new File(host.getAppBaseFile(), cn.getBaseName()); if (context.getDocBase() != null) { // first assume docBase is absolute expandedDocBase = new File(context.getDocBase()); if (!expandedDocBase.isAbsolute()) { // if docBase specified and relative, it must be relative to appBase expandedDocBase = new File(host.getAppBaseFile(), context.getDocBase()); } } // Add the eventual unpacked WAR and all the resources which will be // watched inside it if (isExternalWar && unpackWARs) { deployedApp.redeployResources.put(expandedDocBase.getAbsolutePath(), Long.valueOf(expandedDocBase.lastModified())); deployedApp.redeployResources.put(contextXml.getAbsolutePath(), Long.valueOf(contextXml.lastModified())); addWatchedResources(deployedApp, expandedDocBase.getAbsolutePath(), context); } else { // Find an existing matching war and expanded folder if (!isExternal) { File warDocBase = new File(expandedDocBase.getAbsolutePath() + ".war"); if (warDocBase.exists()) { deployedApp.redeployResources.put(warDocBase.getAbsolutePath(), 
Long.valueOf(warDocBase.lastModified())); } } if (expandedDocBase.exists()) { deployedApp.redeployResources.put(expandedDocBase.getAbsolutePath(), Long.valueOf(expandedDocBase.lastModified())); addWatchedResources(deployedApp, expandedDocBase.getAbsolutePath(), context); } else { addWatchedResources(deployedApp, null, context); } // Add the context XML to the list of files which should trigger a redeployment if (!isExternal) { deployedApp.redeployResources.put( contextXml.getAbsolutePath(), Long.valueOf(contextXml.lastModified())); } } // Add the global redeploy resources (which are never deleted) at // the end so they don't interfere with the deletion process addGlobalRedeployResources(deployedApp); } if (host.findChild(context.getName()) != null) { deployed.put(context.getName(), deployedApp); } } /** * Deploy WAR files. */ protected void deployWARs(File appBase, String[] files) { if (files == null) return; ExecutorService es = host.getStartStopExecutor(); List<Future<?>> results = new ArrayList<>(); for (int i = 0; i < files.length; i++) { if (files[i].equalsIgnoreCase("META-INF")) continue; if (files[i].equalsIgnoreCase("WEB-INF")) continue; File war = new File(appBase, files[i]); if (files[i].toLowerCase(Locale.ENGLISH).endsWith(".war") && war.isFile() && !invalidWars.contains(files[i]) ) { ContextName cn = new ContextName(files[i]); // Check for WARs with /../ /./ or similar sequences in the name if (!validateContextPath(appBase, cn.getBaseName())) { log.error(sm.getString( "hostConfig.illegalWarName", files[i])); invalidWars.add(files[i]); continue; } if (isServiced(cn.getName()) || deploymentExists(cn.getName())) continue; results.add(es.submit(new DeployWar(this, cn, war))); } } for (Future<?> result : results) { try { result.get(); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployWar.threaded.error"), e); } } } private boolean validateContextPath(File appBase, String contextPath) { // More complicated than the ideal as the canonical path 
may or may // not end with File.separator for a directory StringBuilder docBase; String canonicalDocBase = null; try { String canonicalAppBase = appBase.getCanonicalPath(); docBase = new StringBuilder(canonicalAppBase); if (canonicalAppBase.endsWith(File.separator)) { docBase.append(contextPath.substring(1).replace( '/', File.separatorChar)); } else { docBase.append(contextPath.replace('/', File.separatorChar)); } // At this point docBase should be canonical but will not end // with File.separator canonicalDocBase = (new File(docBase.toString())).getCanonicalPath(); // If the canonicalDocBase ends with File.separator, add one to // docBase before they are compared if (canonicalDocBase.endsWith(File.separator)) { docBase.append(File.separator); } } catch (IOException ioe) { return false; } // Compare the two. If they are not the same, the contextPath must // have /../ like sequences in it return canonicalDocBase.equals(docBase.toString()); } /** * @param cn * @param war */ protected void deployWAR(ContextName cn, File war) { // Checking for a nested /META-INF/context.xml JarFile jar = null; JarEntry entry = null; InputStream istream = null; FileOutputStream fos = null; BufferedOutputStream ostream = null; File xml; if (copyXML) { xml = new File(host.getConfigBaseFile(), cn.getBaseName() + ".xml"); } else { xml = new File(host.getAppBaseFile(), cn.getBaseName() + "/META-INF/context.xml"); } boolean xmlInWar = false; if (deployXML && !xml.exists()) { try { jar = new JarFile(war); entry = jar.getJarEntry(Constants.ApplicationContextXml); if (entry != null) { xmlInWar = true; } if ((copyXML || unpackWARs) && xmlInWar) { istream = jar.getInputStream(entry); fos = new FileOutputStream(xml); ostream = new BufferedOutputStream(fos, 1024); byte buffer[] = new byte[1024]; while (true) { int n = istream.read(buffer); if (n < 0) { break; } ostream.write(buffer, 0, n); } ostream.flush(); } } catch (IOException e) { /* Ignore */ } finally { if (ostream != null) { try { 
ostream.close(); } catch (IOException ioe) { // Ignore } ostream = null; } if (fos != null) { try { fos.close(); } catch (IOException ioe) { // Ignore } fos = null; } if (istream != null) { try { istream.close(); } catch (IOException ioe) { // Ignore } istream = null; } entry = null; if (jar != null) { try { jar.close(); } catch (IOException ioe) { // Ignore; } jar = null; } } } DeployedApplication deployedApp = new DeployedApplication(cn.getName()); // Deploy the application in this WAR file if(log.isInfoEnabled()) log.info(sm.getString("hostConfig.deployWar", war.getAbsolutePath())); Context context = null; try { if (deployXML && xml.exists()) { synchronized (digester) { try { context = (Context) digester.parse(xml); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", war.getAbsolutePath())); } finally { if (context == null) { context = new FailedContext(); } digester.reset(); } } context.setConfigFile(xml.toURI().toURL()); } else if (deployXML && xmlInWar) { synchronized (digester) { try { jar = new JarFile(war); entry = jar.getJarEntry(Constants.ApplicationContextXml); istream = jar.getInputStream(entry); context = (Context) digester.parse(istream); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", war.getAbsolutePath())); } finally { if (context == null) { context = new FailedContext(); } context.setConfigFile(new URL("jar:" + war.toURI().toString() + "!/" + Constants.ApplicationContextXml)); if (istream != null) { try { istream.close(); } catch (IOException e) { /* Ignore */ } istream = null; } entry = null; if (jar != null) { try { jar.close(); } catch (IOException e) { /* Ignore */ } jar = null; } digester.reset(); } } } else { context = (Context) Class.forName(contextClass).newInstance(); } // Populate redeploy resources with the WAR file deployedApp.redeployResources.put (war.getAbsolutePath(), Long.valueOf(war.lastModified())); if (deployXML && xml.exists() && copyXML) { 
deployedApp.redeployResources.put(xml.getAbsolutePath(), Long.valueOf(xml.lastModified())); } Class<?> clazz = Class.forName(host.getConfigClass()); LifecycleListener listener = (LifecycleListener) clazz.newInstance(); context.addLifecycleListener(listener); context.setName(cn.getName()); context.setPath(cn.getPath()); context.setWebappVersion(cn.getVersion()); context.setDocBase(cn.getBaseName() + ".war"); host.addChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.error(sm.getString("hostConfig.deployWar.error", war.getAbsolutePath()), t); } finally { // If we're unpacking WARs, the docBase will be mutated after // starting the context if (unpackWARs && context != null && context.getDocBase() != null) { File docBase = new File(host.getAppBaseFile(), cn.getBaseName()); deployedApp.redeployResources.put(docBase.getAbsolutePath(), Long.valueOf(docBase.lastModified())); addWatchedResources(deployedApp, docBase.getAbsolutePath(), context); if (deployXML && !copyXML && (xmlInWar || xml.exists())) { deployedApp.redeployResources.put(xml.getAbsolutePath(), Long.valueOf(xml.lastModified())); } } else { addWatchedResources(deployedApp, null, context); } // Add the global redeploy resources (which are never deleted) at // the end so they don't interfere with the deletion process addGlobalRedeployResources(deployedApp); } deployed.put(cn.getName(), deployedApp); } /** * Deploy directories. 
*/ protected void deployDirectories(File appBase, String[] files) { if (files == null) return; ExecutorService es = host.getStartStopExecutor(); List<Future<?>> results = new ArrayList<>(); for (int i = 0; i < files.length; i++) { if (files[i].equalsIgnoreCase("META-INF")) continue; if (files[i].equalsIgnoreCase("WEB-INF")) continue; File dir = new File(appBase, files[i]); if (dir.isDirectory()) { ContextName cn = new ContextName(files[i]); if (isServiced(cn.getName()) || deploymentExists(cn.getName())) continue; results.add(es.submit(new DeployDirectory(this, cn, dir))); } } for (Future<?> result : results) { try { result.get(); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDir.threaded.error"), e); } } } /** * @param cn * @param dir */ protected void deployDirectory(ContextName cn, File dir) { DeployedApplication deployedApp = new DeployedApplication(cn.getName()); // Deploy the application in this directory if( log.isInfoEnabled() ) log.info(sm.getString("hostConfig.deployDir", dir.getAbsolutePath())); Context context = null; File xml = new File(dir, Constants.ApplicationContextXml); File xmlCopy = null; try { if (deployXML && xml.exists()) { synchronized (digester) { try { context = (Context) digester.parse(xml); } catch (Exception e) { log.error(sm.getString( "hostConfig.deployDescriptor.error", xml)); } finally { if (context == null) { context = new FailedContext(); } digester.reset(); } } if (copyXML) { xmlCopy = new File(host.getConfigBaseFile(), cn.getBaseName() + ".xml"); InputStream is = null; OutputStream os = null; try { is = new FileInputStream(xml); os = new FileOutputStream(xmlCopy); IOTools.flow(is, os); // Don't catch IOE - let the outer try/catch handle it } finally { try { if (is != null) is.close(); } catch (IOException e){ // Ignore } try { if (os != null) os.close(); } catch (IOException e){ // Ignore } } context.setConfigFile(xmlCopy.toURI().toURL()); } else { context.setConfigFile(xml.toURI().toURL()); } } else { context 
= (Context) Class.forName(contextClass).newInstance(); } Class<?> clazz = Class.forName(host.getConfigClass()); LifecycleListener listener = (LifecycleListener) clazz.newInstance(); context.addLifecycleListener(listener); context.setName(cn.getName()); context.setPath(cn.getPath()); context.setWebappVersion(cn.getVersion()); context.setDocBase(cn.getBaseName()); host.addChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.error(sm.getString("hostConfig.deployDir.error", dir.getAbsolutePath()), t); } finally { deployedApp.redeployResources.put(dir.getAbsolutePath(), Long.valueOf(dir.lastModified())); if (deployXML && xml.exists()) { if (xmlCopy == null) { deployedApp.redeployResources.put( xml.getAbsolutePath(), Long.valueOf(xml.lastModified())); } else { deployedApp.redeployResources.put( xmlCopy.getAbsolutePath(), Long.valueOf(xmlCopy.lastModified())); } } addWatchedResources(deployedApp, dir.getAbsolutePath(), context); // Add the global redeploy resources (which are never deleted) at // the end so they don't interfere with the deletion process addGlobalRedeployResources(deployedApp); } deployed.put(cn.getName(), deployedApp); } /** * Check if a webapp is already deployed in this host. * * @param contextName of the context which will be checked */ protected boolean deploymentExists(String contextName) { return (deployed.containsKey(contextName) || (host.findChild(contextName) != null)); } /** * Add watched resources to the specified Context. * @param app HostConfig deployed app * @param docBase web app docBase * @param context web application context */ protected void addWatchedResources(DeployedApplication app, String docBase, Context context) { // FIXME: Feature idea. 
Add support for patterns (ex: WEB-INF/*, // WEB-INF/*.xml), where we would only check if at least one // resource is newer than app.timestamp File docBaseFile = null; if (docBase != null) { docBaseFile = new File(docBase); if (!docBaseFile.isAbsolute()) { docBaseFile = new File(host.getAppBaseFile(), docBase); } } String[] watchedResources = context.findWatchedResources(); for (int i = 0; i < watchedResources.length; i++) { File resource = new File(watchedResources[i]); if (!resource.isAbsolute()) { if (docBase != null) { resource = new File(docBaseFile, watchedResources[i]); } else { if(log.isDebugEnabled()) log.debug("Ignoring non-existent WatchedResource '" + resource.getAbsolutePath() + "'"); continue; } } if(log.isDebugEnabled()) log.debug("Watching WatchedResource '" + resource.getAbsolutePath() + "'"); app.reloadResources.put(resource.getAbsolutePath(), Long.valueOf(resource.lastModified())); } } protected void addGlobalRedeployResources(DeployedApplication app) { // Redeploy resources processing is hard-coded to never delete this file File hostContextXml = new File(getConfigBaseName(), Constants.HostContextXml); if (hostContextXml.isFile()) { app.redeployResources.put(hostContextXml.getAbsolutePath(), Long.valueOf(hostContextXml.lastModified())); } // Redeploy resources in CATALINA_BASE/conf are never deleted File globalContextXml = returnCanonicalPath(Constants.DefaultContextXml); if (globalContextXml.isFile()) { app.redeployResources.put(globalContextXml.getAbsolutePath(), Long.valueOf(globalContextXml.lastModified())); } } /** * Check resources for redeployment and reloading. 
*/ protected synchronized void checkResources(DeployedApplication app) { String[] resources = app.redeployResources.keySet().toArray(new String[0]); for (int i = 0; i < resources.length; i++) { File resource = new File(resources[i]); if (log.isDebugEnabled()) log.debug("Checking context[" + app.name + "] redeploy resource " + resource); if (resource.exists()) { long lastModified = app.redeployResources.get(resources[i]).longValue(); if ((!resource.isDirectory()) && resource.lastModified() > lastModified) { // Undeploy application if (log.isInfoEnabled()) log.info(sm.getString("hostConfig.undeploy", app.name)); Container context = host.findChild(app.name); try { host.removeChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.warn(sm.getString ("hostConfig.context.remove", app.name), t); } // Delete other redeploy resources for (int j = i + 1; j < resources.length; j++) { try { File current = new File(resources[j]); current = current.getCanonicalFile(); // Never delete per host context.xml defaults if (Constants.HostContextXml.equals( current.getName())) { continue; } // Only delete resources in the appBase or the // host's configBase if ((current.getAbsolutePath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) || (current.getAbsolutePath().startsWith( host.getConfigBaseFile().getAbsolutePath()))) { if (log.isDebugEnabled()) log.debug("Delete " + current); ExpandWar.delete(current); } } catch (IOException e) { log.warn(sm.getString ("hostConfig.canonicalizing", app.name), e); } } deployed.remove(app.name); return; } } else { // There is a chance the the resource was only missing // temporarily eg renamed during a text editor save try { Thread.sleep(500); } catch (InterruptedException e1) { // Ignore } // Recheck the resource to see if it was really deleted if (resource.exists()) { continue; } long lastModified = app.redeployResources.get(resources[i]).longValue(); if (lastModified == 0L) { continue; } // Undeploy 
application if (log.isInfoEnabled()) log.info(sm.getString("hostConfig.undeploy", app.name)); Container context = host.findChild(app.name); try { host.removeChild(context); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.warn(sm.getString ("hostConfig.context.remove", app.name), t); } // Delete all redeploy resources for (int j = i + 1; j < resources.length; j++) { try { File current = new File(resources[j]); current = current.getCanonicalFile(); // Never delete per host context.xml defaults if (Constants.HostContextXml.equals( current.getName())) { continue; } // Only delete resources in the appBase or the host's // configBase if ((current.getAbsolutePath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) || (current.getAbsolutePath().startsWith( host.getConfigBaseFile().getAbsolutePath()))) { if (log.isDebugEnabled()) log.debug("Delete " + current); ExpandWar.delete(current); } } catch (IOException e) { log.warn(sm.getString ("hostConfig.canonicalizing", app.name), e); } } // Delete reload resources as well (to remove any remaining .xml // descriptor) String[] resources2 = app.reloadResources.keySet().toArray(new String[0]); for (int j = 0; j < resources2.length; j++) { try { File current = new File(resources2[j]); current = current.getCanonicalFile(); // Never delete per host context.xml defaults if (Constants.HostContextXml.equals( current.getName())) { continue; } // Only delete resources in the appBase or the host's // configBase if ((current.getAbsolutePath().startsWith( host.getAppBaseFile().getAbsolutePath() + File.separator)) || ((current.getAbsolutePath().startsWith( host.getConfigBaseFile().getAbsolutePath()) && (current.getAbsolutePath().endsWith(".xml"))))) { if (log.isDebugEnabled()) log.debug("Delete " + current); ExpandWar.delete(current); } } catch (IOException e) { log.warn(sm.getString ("hostConfig.canonicalizing", app.name), e); } } deployed.remove(app.name); return; } } resources = 
app.reloadResources.keySet().toArray(new String[0]); for (int i = 0; i < resources.length; i++) { File resource = new File(resources[i]); if (log.isDebugEnabled()) log.debug("Checking context[" + app.name + "] reload resource " + resource); long lastModified = app.reloadResources.get(resources[i]).longValue(); if ((!resource.exists() && lastModified != 0L) || (resource.lastModified() != lastModified)) { // Reload application if(log.isInfoEnabled()) log.info(sm.getString("hostConfig.reload", app.name)); Context context = (Context) host.findChild(app.name); if (context.getState().isAvailable()) { // Reload catches and logs exceptions context.reload(); } else { // If the context was not started (for example an error // in web.xml) we'll still get to try to start try { context.start(); } catch (Exception e) { log.warn(sm.getString ("hostConfig.context.restart", app.name), e); } } // Update times app.reloadResources.put(resources[i], Long.valueOf(resource.lastModified())); app.timestamp = System.currentTimeMillis(); return; } } } /** * Process a "start" event for this Host. 
*/ public void start() { if (log.isDebugEnabled()) log.debug(sm.getString("hostConfig.start")); try { ObjectName hostON = host.getObjectName(); oname = new ObjectName (hostON.getDomain() + ":type=Deployer,host=" + host.getName()); Registry.getRegistry(null, null).registerComponent (this, oname, this.getClass().getName()); } catch (Exception e) { log.error(sm.getString("hostConfig.jmx.register", oname), e); } if (host.getCreateDirs()) { File[] dirs = new File[] {host.getAppBaseFile(),host.getConfigBaseFile()}; for (int i=0; i<dirs.length; i++) { if (!dirs[i].mkdirs() && !dirs[i].isDirectory()) { log.error(sm.getString("hostConfig.createDirs",dirs[i])); } } } if (!host.getAppBaseFile().isDirectory()) { log.error(sm.getString("hostConfig.appBase", host.getName(), host.getAppBaseFile().getPath())); host.setDeployOnStartup(false); host.setAutoDeploy(false); } if (host.getDeployOnStartup()) deployApps(); } /** * Process a "stop" event for this Host. */ public void stop() { if (log.isDebugEnabled()) log.debug(sm.getString("hostConfig.stop")); if (oname != null) { try { Registry.getRegistry(null, null).unregisterComponent(oname); } catch (Exception e) { log.error(sm.getString("hostConfig.jmx.unregister", oname), e); } } oname = null; } /** * Check status of all webapps. */ protected void check() { if (host.getAutoDeploy()) { // Check for resources modification to trigger redeployment DeployedApplication[] apps = deployed.values().toArray(new DeployedApplication[0]); for (int i = 0; i < apps.length; i++) { if (!isServiced(apps[i].name)) checkResources(apps[i]); } // Hotdeploy applications deployApps(); } } /** * Check status of a specific webapp, for use with stuff like management webapps. */ public void check(String name) { DeployedApplication app = deployed.get(name); if (app != null) { checkResources(app); } else { deployApps(name); } } /** * Add a new Context to be managed by us. * Entry point for the admin webapp, and other JMX Context controllers. 
*/ public void manageApp(Context context) { String contextName = context.getName(); if (deployed.containsKey(contextName)) return; DeployedApplication deployedApp = new DeployedApplication(contextName); // Add the associated docBase to the redeployed list if it's a WAR boolean isWar = false; if (context.getDocBase() != null) { File docBase = new File(context.getDocBase()); if (!docBase.isAbsolute()) { docBase = new File(host.getAppBaseFile(), context.getDocBase()); } deployedApp.redeployResources.put(docBase.getAbsolutePath(), Long.valueOf(docBase.lastModified())); if (docBase.getAbsolutePath().toLowerCase(Locale.ENGLISH).endsWith(".war")) { isWar = true; } } host.addChild(context); // Add the eventual unpacked WAR and all the resources which will be // watched inside it if (isWar && unpackWARs) { File docBase = new File(host.getAppBaseFile(), context.getBaseName()); deployedApp.redeployResources.put(docBase.getAbsolutePath(), Long.valueOf(docBase.lastModified())); addWatchedResources(deployedApp, docBase.getAbsolutePath(), context); } else { addWatchedResources(deployedApp, null, context); } deployed.put(contextName, deployedApp); } /** * Remove a webapp from our control. * Entry point for the admin webapp, and other JMX Context controllers. */ public void unmanageApp(String contextName) { if(isServiced(contextName)) { deployed.remove(contextName); host.removeChild(host.findChild(contextName)); } } // ----------------------------------------------------- Instance Variables /** * This class represents the state of a deployed application, as well as * the monitored resources. */ protected static class DeployedApplication { public DeployedApplication(String name) { this.name = name; } /** * Application context path. The assertion is that * (host.getChild(name) != null). */ public final String name; /** * Any modification of the specified (static) resources will cause a * redeployment of the application. 
If any of the specified resources is * removed, the application will be undeployed. Typically, this will * contain resources like the context.xml file, a compressed WAR path. * The value is the last modification time. */ public final LinkedHashMap<String, Long> redeployResources = new LinkedHashMap<>(); /** * Any modification of the specified (static) resources will cause a * reload of the application. This will typically contain resources * such as the web.xml of a webapp, but can be configured to contain * additional descriptors. * The value is the last modification time. */ public final HashMap<String, Long> reloadResources = new HashMap<>(); /** * Instant where the application was last put in service. */ public long timestamp = System.currentTimeMillis(); } private static class DeployDescriptor implements Runnable { private HostConfig config; private ContextName cn; private File descriptor; public DeployDescriptor(HostConfig config, ContextName cn, File descriptor) { this.config = config; this.cn = cn; this.descriptor= descriptor; } @Override public void run() { config.deployDescriptor(cn, descriptor); } } private static class DeployWar implements Runnable { private HostConfig config; private ContextName cn; private File war; public DeployWar(HostConfig config, ContextName cn, File war) { this.config = config; this.cn = cn; this.war = war; } @Override public void run() { config.deployWAR(cn, war); } } private static class DeployDirectory implements Runnable { private HostConfig config; private ContextName cn; private File dir; public DeployDirectory(HostConfig config, ContextName cn, File dir) { this.config = config; this.cn = cn; this.dir = dir; } @Override public void run() { config.deployDirectory(cn, dir); } } }
Suppress incorrect warning git-svn-id: 79cef5a5a257cc9dbe40a45ac190115b4780e2d0@1364155 13f79535-47bb-0310-9956-ffa450edef68
java/org/apache/catalina/startup/HostConfig.java
Suppress incorrect warning
Java
apache-2.0
03d4e9b5f17bfc6c3aee85098fec44328c93ca51
0
BuaBook/http-common
package com.buabook.http.common; import java.io.IOException; import java.util.Map; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.buabook.http.common.auth.HttpBasicAuthenticationInitializer; import com.buabook.http.common.auth.HttpBearerAuthenticationInitializer; import com.buabook.http.common.exceptions.HttpClientRequestFailedException; import com.google.api.client.http.ByteArrayContent; import com.google.api.client.http.EmptyContent; import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpContent; import com.google.api.client.http.HttpHeaders; import com.google.api.client.http.HttpRequest; import com.google.api.client.http.HttpRequestFactory; import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpResponseException; import com.google.api.client.http.UrlEncodedContent; import com.google.api.client.http.javanet.NetHttpTransport; import com.google.common.base.Stopwatch; import com.google.common.base.Strings; import com.google.common.net.MediaType; /** * <h3>HTTP Client Access Library</h3> * (c) 2015 Sport Trades Ltd * * @author Jas Rajasansir * @version 1.0.0 * @since 13 Oct 2015 */ public class HttpClient { private static final Logger log = LoggerFactory.getLogger(HttpClient.class); private final HttpRequestFactory requestFactory; public HttpClient() { this.requestFactory = new NetHttpTransport().createRequestFactory(); } protected HttpClient(HttpRequestFactory customRequestFactory) { this.requestFactory = customRequestFactory; } /** * HTTP client instantiation with Basic Authentication configured. See <a href="https://en.wikipedia.org/wiki/Basic_access_authentication#Client_side"> * https://en.wikipedia.org/wiki/Basic_access_authentication#Client_side</a>. 
* @see HttpBasicAuthenticationInitializer */ public HttpClient(String username, String password) { this.requestFactory = new NetHttpTransport().createRequestFactory(new HttpBasicAuthenticationInitializer(username, password)); } /** * HTTP client instantiation with OAuth 2.0 (<code>Bearer</code>) authentication. * @see HttpBearerAuthenticationInitializer */ public HttpClient(String bearerAuthToken) { this.requestFactory = new NetHttpTransport().createRequestFactory(new HttpBearerAuthenticationInitializer(bearerAuthToken)); } /** * <p>Standard HTTP GET request to the specified URL.</p> * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @param url The full URL to query. {@link HttpHelpers#appendUrlParameters(String, Map)} is useful to append query parameters to * the URL before passing into this function. * @throws HttpClientRequestFailedException If the GET returns any HTTP error code or the request fails due to an {@link IOException} * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doGet(String url) throws HttpClientRequestFailedException { if(Strings.isNullOrEmpty(url)) throw new IllegalArgumentException("No URL specified"); log.debug("Attempting HTTP GET [ URL: " + url + " ]"); GenericUrl target = new GenericUrl(url); HttpResponse response = null; Stopwatch timer = Stopwatch.createStarted(); try { HttpRequest request = requestFactory.buildGetRequest(target); response = request.execute(); } catch(HttpResponseException e) { log.error("HTTP client GET failed due to bad HTTP status code [ URL: " + url + " ] [ Status Code: " + e.getStatusCode() + " ] [ In Flight: " + timer.stop() + " ]"); throw new HttpClientRequestFailedException(e); } catch (IOException e) { log.error("HTTP client GET failed [ URL: " + url + " ] [ In Flight: " + timer.stop() + 
" ]. Error - " + e.getMessage(), e); throw new HttpClientRequestFailedException(e); } log.debug("HTTP GET successful [ URL: " + url + " ] [ In Flight: " + timer.stop() + " ]"); return response; } /** * <p>Standard HTTP POST equivalent to <code>&lt;form&gt;</code> or Content-Type <code>application/x-www-form-urlencoded</code>.</p> * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @param url * @param map * @return * @throws HttpClientRequestFailedException If the POST returns any HTTP error code or the request fails due to an {@link IOException} * @see #doPost(String, HttpContent, HttpHeaders) * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doPost(String url, Map<String, Object> map) throws HttpClientRequestFailedException { UrlEncodedContent content = new UrlEncodedContent(map); return doPost(url, content, null); } /** * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @param contentType A standard HTTP Content-Type (e.g. "application/json"). If this is <code>null</code> or empty string, * "text/plain" will be used * @param postContent The content to POST. Pass <code>null</code> or empty string to send no content * @param headers Any custom headers that need to be set. 
Pass <code>null</code> if not required * @throws HttpClientRequestFailedException If the GET returns any HTTP error code or the request fails due to an {@link IOException} * @see #doPost(String, HttpContent, HttpHeaders) * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doPost(String url, MediaType contentType, String postContent, HttpHeaders headers) throws HttpClientRequestFailedException { HttpContent content = null; if(contentType == null) contentType = MediaType.PLAIN_TEXT_UTF_8; if(Strings.isNullOrEmpty(postContent)) content = new EmptyContent(); else content = ByteArrayContent.fromString(contentType.toString(), postContent); return doPost(url, content, headers); } /** * <p>Performs a HTTP POST based on the specified content and URL. If you are unsure what content to pass, look at * the other <code>doPost</code> methods.</p> * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @return The response from the server * @throws IllegalArgumentException If either of the parameters are <code>null</code> * @throws HttpClientRequestFailedException If the request fails due to underlying network errors or a bad HTTP server status (e.g. 404, 500) * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doPost(String url, HttpContent content, HttpHeaders headers) throws IllegalArgumentException, HttpClientRequestFailedException { if(Strings.isNullOrEmpty(url)) throw new IllegalArgumentException("No URL specified"); if(content == null) throw new IllegalArgumentException("Content cannot be null"); log.debug("Attempting HTTP POST [ URL: " + url + " ] [ Content-Type: " + content.getType() + " ] [ Custom Headers: " + ((headers == null) ? 
0 : headers.size()) + " ]"); GenericUrl target = new GenericUrl(url); HttpResponse response = null; Stopwatch timer = Stopwatch.createStarted(); try { HttpRequest request = requestFactory.buildPostRequest(target, content); if(headers != null) request.setHeaders(headers); response = request.execute(); } catch (HttpResponseException e) { log.error("HTTP client POST failed due to bad HTTP status code [ URL: " + url + " ] [ Status Code: " + e.getStatusCode() + " ] [ In Flight: " + timer.stop() + " ]"); throw new HttpClientRequestFailedException(e); } catch (IOException e) { log.error("HTTP client POST failed [ URL: " + url + " ] [ In Flight: " + timer.stop() + " ]. Error - " + e.getMessage(), e); throw new HttpClientRequestFailedException(e); } log.debug("HTTP POST successful [ URL: " + url + " ] [ In Flight: " + timer.stop() + " ]"); return response; } public HttpResponse doPut(String url, HttpContent content) throws IllegalArgumentException, HttpClientRequestFailedException { if(Strings.isNullOrEmpty(url)) throw new IllegalArgumentException("No URL specified"); if(content == null) throw new IllegalArgumentException("Content cannot be null"); log.debug("Attempting HTTP PUT [ URL: " + url + " ] [ Content-Type: " + content.getType() + " ]"); GenericUrl target = new GenericUrl(url); HttpResponse response = null; Stopwatch timer = Stopwatch.createStarted(); try { HttpRequest request = requestFactory.buildPutRequest(target, content); response = request.execute(); } catch (HttpResponseException e) { log.error("HTTP client PUT failed due to bad HTTP status code [ URL: " + url + " ] [ Status Code: " + e.getStatusCode() + " ] [ In Flight: " + timer.stop() + " ]"); throw new HttpClientRequestFailedException(e); } catch (IOException e) { log.error("HTTP client PUT failed [ URL: " + url + " ] [ In Flight: " + timer.stop() + "]. 
Error - " + e.getMessage(), e); throw new HttpClientRequestFailedException(e); } log.debug("HTTP PUT successful [ URL: " + url + " ] [ In Flight: " + timer.stop() + " ]"); return response; } /** * <b>NOTE</b>: After reading the response, {@link HttpResponse#disconnect()} will be called as required * by the documentation. * @param response * @return The body of the response as a string */ public static String getResponseAsString(HttpResponse response) { if(response == null) return ""; String responseString = ""; try { responseString = response.parseAsString(); } catch (IOException e) { log.error("Failed to convert HTTP response to string. Error - " + e.getMessage(), e); } finally { try { response.disconnect(); } catch (IOException e) {} } return responseString; } /** * @param response * @return The body of the response as a JSON object * @throws JSONException If the string response does not parse into valid JSON * @see #getResponseAsString(HttpResponse) */ public static JSONObject getResponseAsJson(HttpResponse response) throws JSONException { return new JSONObject(getResponseAsString(response)); } }
src/main/java/com/buabook/http/common/HttpClient.java
package com.buabook.http.common; import java.io.IOException; import java.util.Map; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.buabook.http.common.auth.HttpBasicAuthenticationInitializer; import com.buabook.http.common.auth.HttpBearerAuthenticationInitializer; import com.buabook.http.common.exceptions.HttpClientRequestFailedException; import com.google.api.client.http.ByteArrayContent; import com.google.api.client.http.EmptyContent; import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpContent; import com.google.api.client.http.HttpHeaders; import com.google.api.client.http.HttpRequest; import com.google.api.client.http.HttpRequestFactory; import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpResponseException; import com.google.api.client.http.UrlEncodedContent; import com.google.api.client.http.javanet.NetHttpTransport; import com.google.common.base.Strings; import com.google.common.net.MediaType; /** * <h3>HTTP Client Access Library</h3> * (c) 2015 Sport Trades Ltd * * @author Jas Rajasansir * @version 1.0.0 * @since 13 Oct 2015 */ public class HttpClient { private static final Logger log = LoggerFactory.getLogger(HttpClient.class); private final HttpRequestFactory requestFactory; public HttpClient() { this.requestFactory = new NetHttpTransport().createRequestFactory(); } protected HttpClient(HttpRequestFactory customRequestFactory) { this.requestFactory = customRequestFactory; } /** * HTTP client instantiation with Basic Authentication configured. See <a href="https://en.wikipedia.org/wiki/Basic_access_authentication#Client_side"> * https://en.wikipedia.org/wiki/Basic_access_authentication#Client_side</a>. 
* @see HttpBasicAuthenticationInitializer */ public HttpClient(String username, String password) { this.requestFactory = new NetHttpTransport().createRequestFactory(new HttpBasicAuthenticationInitializer(username, password)); } /** * HTTP client instantiation with OAuth 2.0 (<code>Bearer</code>) authentication. * @see HttpBearerAuthenticationInitializer */ public HttpClient(String bearerAuthToken) { this.requestFactory = new NetHttpTransport().createRequestFactory(new HttpBearerAuthenticationInitializer(bearerAuthToken)); } /** * <p>Standard HTTP GET request to the specified URL.</p> * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @param url The full URL to query. {@link HttpHelpers#appendUrlParameters(String, Map)} is useful to append query parameters to * the URL before passing into this function. * @throws HttpClientRequestFailedException If the GET returns any HTTP error code or the request fails due to an {@link IOException} * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doGet(String url) throws HttpClientRequestFailedException { if(Strings.isNullOrEmpty(url)) throw new IllegalArgumentException("No URL specified"); GenericUrl target = new GenericUrl(url); HttpResponse response = null; try { HttpRequest request = requestFactory.buildGetRequest(target); response = request.execute(); } catch(HttpResponseException e) { log.error("HTTP client GET failed due to bad HTTP status code [ Status Code: " + e.getStatusCode() + " ]"); throw new HttpClientRequestFailedException(e); } catch (IOException e) { log.error("HTTP client GET failed [ URL: " + url + " ]. 
Error - " + e.getMessage(), e); throw new HttpClientRequestFailedException(e); } return response; } /** * <p>Standard HTTP POST equivalent to <code>&lt;form&gt;</code> or Content-Type <code>application/x-www-form-urlencoded</code>.</p> * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @param url * @param map * @return * @throws HttpClientRequestFailedException If the POST returns any HTTP error code or the request fails due to an {@link IOException} * @see #doPost(String, HttpContent, HttpHeaders) * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doPost(String url, Map<String, Object> map) throws HttpClientRequestFailedException { UrlEncodedContent content = new UrlEncodedContent(map); return doPost(url, content, null); } /** * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @param contentType A standard HTTP Content-Type (e.g. "application/json"). If this is <code>null</code> or empty string, * "text/plain" will be used * @param postContent The content to POST. Pass <code>null</code> or empty string to send no content * @param headers Any custom headers that need to be set. 
Pass <code>null</code> if not required * @throws HttpClientRequestFailedException If the GET returns any HTTP error code or the request fails due to an {@link IOException} * @see #doPost(String, HttpContent, HttpHeaders) * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doPost(String url, MediaType contentType, String postContent, HttpHeaders headers) throws HttpClientRequestFailedException { HttpContent content = null; if(contentType == null) contentType = MediaType.PLAIN_TEXT_UTF_8; if(Strings.isNullOrEmpty(postContent)) content = new EmptyContent(); else content = ByteArrayContent.fromString(contentType.toString(), postContent); return doPost(url, content, headers); } /** * <p>Performs a HTTP POST based on the specified content and URL. If you are unsure what content to pass, look at * the other <code>doPost</code> methods.</p> * <p><b>NOTE</b>: You must call {@link HttpResponse#disconnect()} after using the * response. {@link #getResponseAsString(HttpResponse)} and {@link #getResponseAsJson(HttpResponse)} will do this for you when * used.</p> * @return The response from the server * @throws IllegalArgumentException If either of the parameters are <code>null</code> * @throws HttpClientRequestFailedException If the request fails due to underlying network errors or a bad HTTP server status (e.g. 
404, 500) * @see #getResponseAsString(HttpResponse) * @see #getResponseAsJson(HttpResponse) */ public HttpResponse doPost(String url, HttpContent content, HttpHeaders headers) throws IllegalArgumentException, HttpClientRequestFailedException { if(Strings.isNullOrEmpty(url)) throw new IllegalArgumentException("No URL specified"); if(content == null) throw new IllegalArgumentException("Content cannot be null"); GenericUrl target = new GenericUrl(url); HttpResponse response = null; try { HttpRequest request = requestFactory.buildPostRequest(target, content); if(headers != null) request.setHeaders(headers); response = request.execute(); } catch (HttpResponseException e) { log.error("HTTP client POST failed due to bad HTTP status code [ Status Code: " + e.getStatusCode() + " ]"); throw new HttpClientRequestFailedException(e); } catch (IOException e) { log.error("HTTP client POST failed [ URL: " + url + " ]. Error - " + e.getMessage(), e); throw new HttpClientRequestFailedException(e); } return response; } public HttpResponse doPut(String url, HttpContent content) throws IllegalArgumentException, HttpClientRequestFailedException { if(Strings.isNullOrEmpty(url)) throw new IllegalArgumentException("No URL specified"); if(content == null) throw new IllegalArgumentException("Content cannot be null"); GenericUrl target = new GenericUrl(url); HttpResponse response = null; try { HttpRequest request = requestFactory.buildPutRequest(target, content); response = request.execute(); } catch (HttpResponseException e) { log.error("HTTP client POST failed due to bad HTTP status code [ Status Code: " + e.getStatusCode() + " ]"); throw new HttpClientRequestFailedException(e); } catch (IOException e) { log.error("HTTP client POST failed [ URL: " + url + " ]. Error - " + e.getMessage(), e); throw new HttpClientRequestFailedException(e); } return response; } /** * <b>NOTE</b>: After reading the response, {@link HttpResponse#disconnect()} will be called as required * by the documentation. 
* @param response * @return The body of the response as a string */ public static String getResponseAsString(HttpResponse response) { if(response == null) return ""; String responseString = ""; try { responseString = response.parseAsString(); } catch (IOException e) { log.error("Failed to convert HTTP response to string. Error - " + e.getMessage(), e); } finally { try { response.disconnect(); } catch (IOException e) {} } return responseString; } /** * @param response * @return The body of the response as a JSON object * @throws JSONException If the string response does not parse into valid JSON * @see #getResponseAsString(HttpResponse) */ public static JSONObject getResponseAsJson(HttpResponse response) throws JSONException { return new JSONObject(getResponseAsString(response)); } }
HttpClient: Add debug logging with in-flight query time
src/main/java/com/buabook/http/common/HttpClient.java
HttpClient: Add debug logging with in-flight query time
Java
apache-2.0
b64130d4ace5bc92d44c9498d6d044c78ee3368a
0
sheliu/openregistry,sheliu/openregistry,Unicon/openregistry,Rutgers-IDM/openregistry,Unicon/openregistry,sheliu/openregistry,Jasig/openregistry,Rutgers-IDM/openregistry,Unicon/openregistry,Jasig/openregistry,Jasig/openregistry,Unicon/openregistry,sheliu/openregistry,msidd/openregistry,Rutgers-IDM/openregistry,Unicon/openregistry,msidd/openregistry,Unicon/openregistry,Rutgers-IDM/openregistry,Jasig/openregistry,sheliu/openregistry,Rutgers-IDM/openregistry,msidd/openregistry,Jasig/openregistry,msidd/openregistry,msidd/openregistry,Jasig/openregistry,Unicon/openregistry
/** * Copyright (C) 2009 Jasig, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openregistry.core.web.resources; import org.springframework.stereotype.Component; import org.springframework.context.annotation.Scope; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.ObjectFactory; import org.openregistry.core.domain.sor.ReconciliationCriteria; import org.openregistry.core.domain.sor.SorPerson; import org.openregistry.core.domain.sor.SorRole; import org.openregistry.core.domain.*; import org.openregistry.core.service.PersonService; import org.openregistry.core.service.ServiceExecutionResult; import org.openregistry.core.service.IdentifierChangeService; import org.openregistry.core.service.reconciliation.PersonMatch; import org.openregistry.core.service.reconciliation.ReconciliationException; import org.openregistry.core.web.resources.representations.LinkRepresentation; import org.openregistry.core.web.resources.representations.PersonRequestRepresentation; import org.openregistry.core.web.resources.representations.PersonResponseRepresentation; import org.openregistry.core.web.resources.representations.RoleRepresentation; import org.openregistry.core.repository.ReferenceRepository; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.*; import javax.ws.rs.core.*; import javax.annotation.Resource; import java.net.URI; import 
java.util.*; import com.sun.jersey.api.NotFoundException; import com.sun.jersey.api.representation.Form; /** * Root RESTful resource representing people in Open Registry. * This component is managed and autowired by Spring by means of context-component-scan, * and served by Jersey when URI is matched against the @Path definition. This bean is a singleton, * and therefore is thread-safe. * * @author Dmitriy Kopylenko * @since 1.0 */ @Component @Scope("singleton") @Path("/people") public final class PeopleResource { //Jersey specific injection @Context UriInfo uriInfo; @Autowired(required = true) private PersonService personService; @Autowired(required = true) private ReferenceRepository referenceRepository; @Resource(name = "reconciliationCriteriaFactory") private ObjectFactory<ReconciliationCriteria> reconciliationCriteriaObjectFactory; //JSR-250 injection which is more appropriate here for 'autowiring by name' in the case of multiple types //are defined in the app ctx (Strings). The looked up bean name defaults to the property name which //needs an injection. 
@Resource private String preferredPersonIdentifierType; @Autowired private IdentifierChangeService identifierChangeService; private final Logger logger = LoggerFactory.getLogger(getClass()); private static final String FORCE_ADD_FLAG = "y"; @PUT @Path("{personIdType}/{personId}/roles/{roleCode}") @Consumes(MediaType.APPLICATION_XML) public Response processIncomingRole(@PathParam("personIdType") String personIdType, @PathParam("personId") String personId, @PathParam("roleCode") String roleCode, @QueryParam("sor") String sorSourceId, RoleRepresentation roleRepresentation) { if (sorSourceId == null) { throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST) .entity("The 'sor' query parameter is missing").build()); } final SorPerson sorPerson = this.personService.findSorPersonByIdentifierAndSourceIdentifier(personIdType, personId, sorSourceId); if (sorPerson == null) { //HTTP 404 throw new NotFoundException( String.format("The person resource identified by [%s/%s] URI does not exist for the given [%s] sor id", personIdType, personId, sorSourceId)); } final RoleInfo roleInfo = this.referenceRepository.getRoleInfoByCode(roleCode); if (roleInfo == null) { throw new NotFoundException( String.format("The role identified by [%s] does not exist", roleCode)); } final SorRole sorRole = buildSorRoleFrom(sorPerson, roleInfo, roleRepresentation); final ServiceExecutionResult result = this.personService.validateAndSaveRoleForSorPerson(sorPerson, sorRole); if (result.getValidationErrors().size() > 0) { throw new WebApplicationException(400); } //HTTP 201 return Response.created(this.uriInfo.getAbsolutePath()).build(); } @GET @Path("{personIdType}/{personId}") @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) //auto content negotiation! 
public PersonResponseRepresentation showPerson(@PathParam("personId") String personId, @PathParam("personIdType") String personIdType) { logger.info(String.format("Searching for a person with {personIdType:%s, personId:%s} ...", personIdType, personId)); final Person person = this.personService.findPersonByIdentifier(personIdType, personId); if (person == null) { //HTTP 404 logger.info("Person is not found."); throw new NotFoundException( String.format("The person resource identified by /people/%s/%s URI does not exist", personIdType, personId)); } logger.info("Person is found. Building a suitable representation..."); return new PersonResponseRepresentation(buildPersonIdentifierRepresentations(person.getIdentifiers())); } @POST @Consumes(MediaType.APPLICATION_XML) public Response processIncomingPerson(PersonRequestRepresentation personRequestRepresentation, @QueryParam("force") String forceAdd) { Response response = null; if (!personRequestRepresentation.checkRequiredData()) { //HTTP 400 return Response.status(Response.Status.BAD_REQUEST).entity("The person entity payload is incomplete.").build(); } final ReconciliationCriteria reconciliationCriteria = buildReconciliationCriteriaFrom(personRequestRepresentation); logger.info("Trying to add incoming person..."); try { final ServiceExecutionResult<Person> result = this.personService.addPerson(reconciliationCriteria); if (!result.succeeded()) { logger.info("The incoming person payload did not pass validation. Validation errors: " + result.getValidationErrors()); return Response.status(Response.Status.BAD_REQUEST).entity("The incoming request is malformed.").build(); } final Person person = result.getTargetObject(); final URI uri = buildPersonResourceUri(person); response = Response.created(uri).entity(buildPersonActivationKeyRepresentation(person)).type(MediaType.APPLICATION_FORM_URLENCODED_TYPE).build(); logger.info(String.format("Person successfully created. 
The person resource URI is %s", uri.toString())); } catch (final ReconciliationException ex) { switch (ex.getReconciliationType()) { case MAYBE: if (FORCE_ADD_FLAG.equals(forceAdd)) { logger.warn("Multiple people found, but doing a 'force add'"); final ServiceExecutionResult<Person> result = this.personService.forceAddPerson(reconciliationCriteria, ex); final Person forcefullyAddedPerson = result.getTargetObject(); final URI uri = buildPersonResourceUri(forcefullyAddedPerson); response = Response.created(uri).entity(buildPersonActivationKeyRepresentation(forcefullyAddedPerson)).type(MediaType.APPLICATION_FORM_URLENCODED_TYPE).build(); logger.info(String.format("Person successfully created (with 'force add' option). The person resource URI is %s", uri.toString())); } else { final List<PersonMatch> conflictingPeopleFound = ex.getMatches(); response = Response.status(409).entity(buildLinksToConflictingPeopleFound(conflictingPeopleFound)).type(MediaType.APPLICATION_XHTML_XML).build(); logger.info("Multiple people found: " + response.getEntity()); } break; case EXACT: final URI uri = buildPersonResourceUri(ex.getMatches().get(0).getPerson()); //HTTP 303 ("See other with GET") response = Response.seeOther(uri).build(); logger.info(String.format("Person already exists. 
The existing person resource URI is %s.", uri.toString())); break; } } return response; } @DELETE @Path("{personIdType}/{personId}/roles/{roleCode}") public Response deleteRoleForPerson(@PathParam("personIdType") String personIdType, @PathParam("personId") String personId, @PathParam("roleCode") String roleCode, @QueryParam("reason") String terminationReason) { logger.info(String.format("Received a request to delete a role for a person with the following params: " + "{personIdType:%s, personId:%s, roleCode:%s, reason:%s}", personIdType, personId, roleCode, terminationReason)); if (terminationReason == null) { return Response.status(Response.Status.BAD_REQUEST).entity("Please specify the <reason> for termination.").build(); } logger.info("Searching for a person..."); final Person person = this.personService.findPersonByIdentifier(personIdType, personId); if (person == null) { logger.info("Person is not found..."); return Response.status(Response.Status.NOT_FOUND).entity("The specified person is not found in the system").build(); } logger.info("Person is found. Picking out the role for a provided 'roleId'..."); final Role role = person.pickOutRole(roleCode); if (role == null) { logger.info("The Role with the specified 'roleId' is not found in the collection of Person Roles"); return Response.status(Response.Status.NOT_FOUND).entity("The specified role is not found for this person").build(); } logger.info("The Role is found"); if (role.isTerminated()) { logger.info("The Role is already terminated."); //Results in HTTP 204 return null; } try { // TODO re-implement this /* if (!this.personService.deleteSorRole(person, role, terminationReason)) { //HTTP 500. Is this OK? logger.info("The call to PersonService.deleteSorRole returned <false>. 
Assuming it's an internal error."); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("The operation resulted in an internal error") .build(); }*/ } catch (final IllegalArgumentException ex) { logger.info("The 'terminationReason' did not pass the validation"); return Response.status(Response.Status.BAD_REQUEST).entity(ex.getMessage()).build(); } //If we got here, everything went well. HTTP 204 logger.info("The Role resource has been successfully DELETEd"); return null; } @DELETE @Path("sor/{sorSource}/{sorId}") public Response deleteSystemOfRecordPerson(@PathParam("sorSource") String sorSource, @PathParam("sorId") String sorId, @QueryParam("mistake") @DefaultValue("false") boolean mistake) { try { if (!this.personService.deleteSystemOfRecordPerson(sorSource, sorId, mistake)) { throw new WebApplicationException(new RuntimeException(String.format("Unable to Delete SorPerson for SoR [ %s ] with ID [ %s ]", sorSource, sorId)), 500); } //HTTP 204 logger.debug("The SOR Person resource has been successfully DELETEd"); return null; } catch (final PersonNotFoundException e) { throw new NotFoundException(String.format("The system of record person resource identified by /people/sor/%s/%s URI does not exist", sorSource, sorId)); } } //TODO: what happens if the role (identified by RoleInfo) has been added already? //NOTE: the sponsor is not set (remains null) as it was not defined in the XML payload as was discussed private SorRole buildSorRoleFrom(final SorPerson person, final RoleInfo roleInfo, final RoleRepresentation roleRepresentation) { final SorRole sorRole = person.addRole(roleInfo); sorRole.setSorId("1"); // TODO: what to set here? 
sorRole.setSourceSorIdentifier(person.getSourceSor()); sorRole.setPersonStatus(referenceRepository.findType(Type.DataTypes.STATUS, "active")); sorRole.setStart(roleRepresentation.startDate); sorRole.setEnd(roleRepresentation.endDate); //Emails for (final RoleRepresentation.Email e : roleRepresentation.emails) { final EmailAddress email = sorRole.addEmailAddress(); email.setAddress(e.address); email.setAddressType(referenceRepository.findType(Type.DataTypes.EMAIL, e.type)); } //Phones for (final RoleRepresentation.Phone ph : roleRepresentation.phones) { final Phone phone = sorRole.addPhone(); phone.setNumber(ph.number); phone.setAddressType(referenceRepository.findType(Type.DataTypes.ADDRESS, ph.addressType)); phone.setPhoneType(referenceRepository.findType(Type.DataTypes.PHONE, ph.type)); phone.setCountryCode(ph.countryCode); phone.setAreaCode(ph.areaCode); phone.setExtension(ph.extension); } //Addresses for (final RoleRepresentation.Address a : roleRepresentation.addresses) { final Address address = sorRole.addAddress(); address.setType(referenceRepository.findType(Type.DataTypes.ADDRESS, a.type)); address.setLine1(a.line1); address.setLine2(a.line2); address.setLine3(a.line3); address.setCity(a.city); address.setPostalCode(a.postalCode); //TODO: how to set Region and Country instances??? Currently there is no way! 
} return sorRole; } private ReconciliationCriteria buildReconciliationCriteriaFrom(final PersonRequestRepresentation request) { final ReconciliationCriteria ps = this.reconciliationCriteriaObjectFactory.getObject(); ps.getPerson().setSourceSor(request.systemOfRecordId); ps.getPerson().setSorId(request.systemOfRecordPersonId); Name name = ps.getPerson().addName(); name.setGiven(request.firstName); name.setFamily(request.lastName); ps.setEmailAddress(request.email); ps.setPhoneNumber(request.phoneNumber); ps.getPerson().setDateOfBirth(request.dateOfBirth); ps.getPerson().setSsn(request.ssn); ps.getPerson().setGender(request.gender); ps.setAddressLine1(request.addressLine1); ps.setAddressLine2(request.addressLine2); ps.setCity(request.city); ps.setRegion(request.region); ps.setPostalCode(request.postalCode); return ps; } private URI buildPersonResourceUri(final Person person) { for (final Identifier id : person.getIdentifiers()) { if (this.preferredPersonIdentifierType.equals(id.getType().getName())) { return this.uriInfo.getAbsolutePathBuilder().path(this.preferredPersonIdentifierType) .path(id.getValue()).build(); } } //Person MUST have at least one id of the preferred configured type. Results in HTTP 500 throw new IllegalStateException("The person must have at least one id of the preferred configured type " + "which is <" + this.preferredPersonIdentifierType + ">"); } private LinkRepresentation buildLinksToConflictingPeopleFound(List<PersonMatch> matches) { //A little defensive stuff. 
Will result in HTTP 500 if (matches.isEmpty()) { throw new IllegalStateException("Person matches cannot be empty if reconciliation result is <MAYBE>"); } final List<LinkRepresentation.Link> links = new ArrayList<LinkRepresentation.Link>(); for (final PersonMatch match : matches) { links.add(new LinkRepresentation.Link("person", buildPersonResourceUri(match.getPerson()).toString())); } return new LinkRepresentation(links); } private List<PersonResponseRepresentation.PersonIdentifierRepresentation> buildPersonIdentifierRepresentations(final Set<Identifier> identifiers) { final List<PersonResponseRepresentation.PersonIdentifierRepresentation> idsRep = new ArrayList<PersonResponseRepresentation.PersonIdentifierRepresentation>(); for (final Identifier id : identifiers) { idsRep.add(new PersonResponseRepresentation.PersonIdentifierRepresentation(id.getType().getName(), id.getValue())); } if (idsRep.isEmpty()) { throw new IllegalStateException("Person identifiers cannot be empty"); } return idsRep; } //Content-Type: application/x-www-form-urlencoded private Form buildPersonActivationKeyRepresentation(final Person person) { final Form f = new Form(); f.putSingle("activationKey", person.getCurrentActivationKey().asString()); return f; } }
openregistry-webapp/src/main/java/org/openregistry/core/web/resources/PeopleResource.java
/** * Copyright (C) 2009 Jasig, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openregistry.core.web.resources; import org.springframework.stereotype.Component; import org.springframework.context.annotation.Scope; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.ObjectFactory; import org.openregistry.core.domain.sor.ReconciliationCriteria; import org.openregistry.core.domain.sor.SorPerson; import org.openregistry.core.domain.sor.SorRole; import org.openregistry.core.domain.*; import org.openregistry.core.service.PersonService; import org.openregistry.core.service.ServiceExecutionResult; import org.openregistry.core.service.IdentifierChangeService; import org.openregistry.core.service.reconciliation.PersonMatch; import org.openregistry.core.service.reconciliation.ReconciliationException; import org.openregistry.core.web.resources.representations.LinkRepresentation; import org.openregistry.core.web.resources.representations.PersonRequestRepresentation; import org.openregistry.core.web.resources.representations.PersonResponseRepresentation; import org.openregistry.core.web.resources.representations.RoleRepresentation; import org.openregistry.core.repository.ReferenceRepository; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.*; import javax.ws.rs.core.*; import javax.annotation.Resource; import java.net.URI; import 
java.util.*;
import com.sun.jersey.api.NotFoundException;
import com.sun.jersey.api.representation.Form;

/**
 * Root RESTful resource representing people in Open Registry.
 * This component is managed and autowired by Spring by means of context-component-scan,
 * and served by Jersey when URI is matched against the @Path definition. This bean is a singleton,
 * and therefore is thread-safe.
 *
 * @author Dmitriy Kopylenko
 * @since 1.0
 */
@Component
@Scope("singleton")
@Path("/people")
public final class PeopleResource {

    //Jersey specific injection
    @Context
    UriInfo uriInfo;

    @Autowired(required = true)
    private PersonService personService;

    @Autowired(required = true)
    private ReferenceRepository referenceRepository;

    // Prototype-scoped factory: each incoming POST builds a fresh ReconciliationCriteria instance.
    @Resource(name = "reconciliationCriteriaFactory")
    private ObjectFactory<ReconciliationCriteria> reconciliationCriteriaObjectFactory;

    //JSR-250 injection which is more appropriate here for 'autowiring by name' in the case of multiple types
    //are defined in the app ctx (Strings). The looked up bean name defaults to the property name which
    //needs an injection.
    @Resource
    private String preferredPersonIdentifierType;

    // NOTE(review): injected but never referenced in this class — presumably used by a feature
    // elsewhere or planned; confirm before removing.
    @Autowired
    private IdentifierChangeService identifierChangeService;

    private final Logger logger = LoggerFactory.getLogger(getClass());

    // Query-parameter value that opts in to "force add" when reconciliation finds possible matches.
    private static final String FORCE_ADD_FLAG = "y";

    /**
     * Adds a role to an existing system-of-record person.
     *
     * Responses: 400 if the 'sor' query parameter is missing or validation fails,
     * 404 if the SoR person or role code is unknown, 201 on success.
     */
    @PUT
    @Path("{personIdType}/{personId}/roles/{roleCode}")
    @Consumes(MediaType.APPLICATION_XML)
    public Response processIncomingRole(@PathParam("personIdType") String personIdType,
                                        @PathParam("personId") String personId,
                                        @PathParam("roleCode") String roleCode,
                                        @QueryParam("sor") String sorSourceId,
                                        RoleRepresentation roleRepresentation) {
        if (sorSourceId == null) {
            throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
                    .entity("The 'sor' query parameter is missing").build());
        }
        SorPerson sorPerson = this.personService.findSorPersonByIdentifierAndSourceIdentifier(personIdType, personId, sorSourceId);
        if (sorPerson == null) {
            //HTTP 404
            throw new NotFoundException(
                    String.format("The person resource identified by [%s/%s] URI does not exist for the given [%s] sor id",
                            personIdType, personId, sorSourceId));
        }
        RoleInfo roleInfo = this.referenceRepository.getRoleInfoByCode(roleCode);
        if (roleInfo == null) {
            throw new NotFoundException(
                    String.format("The role identified by [%s] does not exist", roleCode));
        }
        SorRole sorRole = buildSorRoleFrom(sorPerson, roleInfo, roleRepresentation);
        // NOTE(review): raw type — should likely be ServiceExecutionResult<SorRole>; confirm the
        // service signature before parameterizing.
        ServiceExecutionResult result = this.personService.validateAndSaveRoleForSorPerson(sorPerson, sorRole);
        if (result.getValidationErrors().size() > 0) {
            // NOTE(review): validation details are discarded here — the client only sees a bare 400.
            throw new WebApplicationException(400);
        }
        //HTTP 201
        return Response.created(this.uriInfo.getAbsolutePath()).build();
    }

    /**
     * Returns a representation of the person resolved by the given identifier type and value.
     * 404 if no calculated person matches.
     */
    @GET
    @Path("{personIdType}/{personId}")
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    //auto content negotiation!
    public PersonResponseRepresentation showPerson(@PathParam("personId") String personId,
                                                   @PathParam("personIdType") String personIdType) {
        logger.info(String.format("Searching for a person with {personIdType:%s, personId:%s} ...",
                personIdType, personId));
        final Person person = this.personService.findPersonByIdentifier(personIdType, personId);
        if (person == null) {
            //HTTP 404
            logger.info("Person is not found.");
            throw new NotFoundException(
                    String.format("The person resource identified by /people/%s/%s URI does not exist",
                            personIdType, personId));
        }
        logger.info("Person is found. Building a suitable representation...");
        return new PersonResponseRepresentation(buildPersonIdentifierRepresentations(person.getIdentifiers()));
    }

    /**
     * Adds a new person, running reconciliation against existing people.
     *
     * Responses: 400 on incomplete/invalid payload; 201 with an activation key on success
     * (including 'force add' when ?force=y and reconciliation says MAYBE); 409 with links to
     * conflicting people on an unforced MAYBE; 303 pointing at the existing person on EXACT match.
     */
    @POST
    @Consumes(MediaType.APPLICATION_XML)
    public Response processIncomingPerson(PersonRequestRepresentation personRequestRepresentation,
                                          @QueryParam("force") String forceAdd) {
        Response response = null;
        URI uri = null;
        ReconciliationCriteria reconciliationCriteria = null;

        if (!personRequestRepresentation.checkRequiredData()) {
            //HTTP 400
            return Response.status(Response.Status.BAD_REQUEST).entity("The person entity payload is incomplete.").build();
        }
        reconciliationCriteria = buildReconciliationCriteriaFrom(personRequestRepresentation);
        logger.info("Trying to add incoming person...");
        try {
            final ServiceExecutionResult<Person> result = this.personService.addPerson(reconciliationCriteria);
            if (!result.succeeded()) {
                logger.info("The incoming person payload did not pass validation. Validation errors: "
                        + result.getValidationErrors());
                return Response.status(Response.Status.BAD_REQUEST).entity("The incoming request is malformed.").build();
            }
            final Person person = result.getTargetObject();
            uri = buildPersonResourceUri(person);
            response = Response.created(uri).entity(buildPersonActivationKeyRepresentation(person))
                    .type(MediaType.APPLICATION_FORM_URLENCODED_TYPE).build();
            logger.info(String.format("Person successfully created. The person resource URI is %s", uri.toString()));
        }
        catch (final ReconciliationException ex) {
            switch (ex.getReconciliationType()) {
                case MAYBE:
                    if (FORCE_ADD_FLAG.equals(forceAdd)) {
                        logger.warn("Multiple people found, but doing a 'force add'");
                        final ServiceExecutionResult<Person> result = this.personService.forceAddPerson(reconciliationCriteria, ex);
                        final Person forcefullyAddedPerson = result.getTargetObject();
                        uri = buildPersonResourceUri(forcefullyAddedPerson);
                        response = Response.created(uri).entity(buildPersonActivationKeyRepresentation(forcefullyAddedPerson))
                                .type(MediaType.APPLICATION_FORM_URLENCODED_TYPE).build();
                        logger.info(String.format("Person successfully created (with 'force add' option). The person resource URI is %s", uri.toString()));
                    }
                    else {
                        //HTTP 409 with links to each conflicting person
                        final List<PersonMatch> conflictingPeopleFound = ex.getMatches();
                        response = Response.status(409).entity(buildLinksToConflictingPeopleFound(conflictingPeopleFound))
                                .type(MediaType.APPLICATION_XHTML_XML).build();
                        logger.info("Multiple people found: " + response.getEntity());
                    }
                    break;

                case EXACT:
                    uri = buildPersonResourceUri(ex.getMatches().get(0).getPerson());
                    //HTTP 303 ("See other with GET")
                    response = Response.seeOther(uri).build();
                    logger.info(String.format("Person already exists. The existing person resource URI is %s.", uri.toString()));
                    break;
                // NOTE(review): no default branch — other reconciliation types (if any) fall through
                // and return null, which Jersey renders as HTTP 204; confirm this is intended.
            }
        }
        return response;
    }

    /**
     * Terminates a role on a person. Requires a ?reason= query parameter.
     *
     * Responses: 400 if the reason is missing or invalid, 404 if the person or role is unknown,
     * 204 (null Response) if the role is already terminated or termination succeeds.
     * The actual termination call is currently commented out pending re-implementation.
     */
    @DELETE
    @Path("{personIdType}/{personId}/roles/{roleCode}")
    public Response deleteRoleForPerson(@PathParam("personIdType") String personIdType,
                                        @PathParam("personId") String personId,
                                        @PathParam("roleCode") String roleCode,
                                        @QueryParam("reason") String terminationReason) {
        logger.info(String.format("Received a request to delete a role for a person with the following params: "
                + "{personIdType:%s, personId:%s, roleCode:%s, reason:%s}", personIdType, personId, roleCode, terminationReason));

        if (terminationReason == null) {
            return Response.status(Response.Status.BAD_REQUEST).entity("Please specify the <reason> for termination.").build();
        }
        logger.info("Searching for a person...");
        Person person = this.personService.findPersonByIdentifier(personIdType, personId);
        if (person == null) {
            logger.info("Person is not found...");
            return Response.status(Response.Status.NOT_FOUND).entity("The specified person is not found in the system").build();
        }
        logger.info("Person is found. Picking out the role for a provided 'roleId'...");
        Role role = person.pickOutRole(roleCode);
        if (role == null) {
            logger.info("The Role with the specified 'roleId' is not found in the collection of Person Roles");
            return Response.status(Response.Status.NOT_FOUND).entity("The specified role is not found for this person").build();
        }
        logger.info("The Role is found");
        if (role.isTerminated()) {
            logger.info("The Role is already terminated.");
            //Results in HTTP 204
            return null;
        }
        try {
            // TODO re-implement this
            /* if (!this.personService.deleteSorRole(person, role, terminationReason)) {
                //HTTP 500. Is this OK?
                logger.info("The call to PersonService.deleteSorRole returned <false>. Assuming it's an internal error.");
                return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("The operation resulted in an internal error")
                        .build();
            }*/
        }
        catch (IllegalArgumentException ex) {
            logger.info("The 'terminationReason' did not pass the validation");
            return Response.status(Response.Status.BAD_REQUEST).entity(ex.getMessage()).build();
        }
        //If we got here, everything went well. HTTP 204
        logger.info("The Role resource has been successfully DELETEd");
        return null;
    }

    /**
     * Deletes a system-of-record person. ?mistake=true indicates the SoR record was added in error.
     *
     * Responses: 404 if the SoR person does not exist, 500 if the service reports failure,
     * 204 (null Response) on success.
     */
    @DELETE
    @Path("sor/{sorSource}/{sorId}")
    public Response deleteSystemOfRecordPerson(@PathParam("sorSource") String sorSource,
                                               @PathParam("sorId") String sorId,
                                               @QueryParam("mistake") @DefaultValue("false") boolean mistake) {
        try {
            if (!this.personService.deleteSystemOfRecordPerson(sorSource, sorId, mistake)) {
                throw new WebApplicationException(
                        new RuntimeException(
                                String.format("Unable to Delete SorPerson for SoR [ %s ] with ID [ %s ]", sorSource, sorId)), 500);
            }
            //HTTP 204
            logger.debug("The SOR Person resource has been successfully DELETEd");
            return null;
        }
        catch (final PersonNotFoundException e) {
            throw new NotFoundException(String.format("The system of record person resource identified by /people/sor/%s/%s URI does not exist",
                    sorSource, sorId));
        }
    }

    //TODO: what happens if the role (identified by RoleInfo) has been added already?
    //NOTE: the sponsor is not set (remains null) as it was not defined in the XML payload as was discussed
    /**
     * Maps an incoming RoleRepresentation onto a new SorRole attached to the given SoR person,
     * copying dates, emails, phones and addresses, resolving reference types via the repository.
     */
    private SorRole buildSorRoleFrom(final SorPerson person, final RoleInfo roleInfo, final RoleRepresentation roleRepresentation) {
        SorRole sorRole = person.addRole(roleInfo);
        sorRole.setSorId("1"); // TODO: what to set here?
        sorRole.setSourceSorIdentifier(person.getSourceSor());
        sorRole.setPersonStatus(referenceRepository.findType(Type.DataTypes.STATUS, "active"));
        sorRole.setStart(roleRepresentation.startDate);
        sorRole.setEnd(roleRepresentation.endDate);

        //Emails
        for (final RoleRepresentation.Email e : roleRepresentation.emails) {
            final EmailAddress email = sorRole.addEmailAddress();
            email.setAddress(e.address);
            email.setAddressType(referenceRepository.findType(Type.DataTypes.EMAIL, e.type));
        }

        //Phones
        for (final RoleRepresentation.Phone ph : roleRepresentation.phones) {
            final Phone phone = sorRole.addPhone();
            phone.setNumber(ph.number);
            phone.setAddressType(referenceRepository.findType(Type.DataTypes.ADDRESS, ph.addressType));
            phone.setPhoneType(referenceRepository.findType(Type.DataTypes.PHONE, ph.type));
            phone.setCountryCode(ph.countryCode);
            phone.setAreaCode(ph.areaCode);
            phone.setExtension(ph.extension);
        }

        //Addresses
        for (final RoleRepresentation.Address a : roleRepresentation.addresses) {
            final Address address = sorRole.addAddress();
            address.setType(referenceRepository.findType(Type.DataTypes.ADDRESS, a.type));
            address.setLine1(a.line1);
            address.setLine2(a.line2);
            address.setLine3(a.line3);
            address.setCity(a.city);
            address.setPostalCode(a.postalCode);
            //TODO: how to set Region and Country instances??? Currently there is no way!
        }
        return sorRole;
    }

    /**
     * Builds a fresh ReconciliationCriteria (via the prototype-scoped factory) from the incoming
     * person payload; the criteria carry both the SoR person fields and the match inputs.
     */
    private ReconciliationCriteria buildReconciliationCriteriaFrom(final PersonRequestRepresentation request) {
        ReconciliationCriteria ps = this.reconciliationCriteriaObjectFactory.getObject();
        ps.getPerson().setSourceSor(request.systemOfRecordId);
        ps.getPerson().setSorId(request.systemOfRecordPersonId);
        Name name = ps.getPerson().addName();
        name.setGiven(request.firstName);
        name.setFamily(request.lastName);
        ps.setEmailAddress(request.email);
        ps.setPhoneNumber(request.phoneNumber);
        ps.getPerson().setDateOfBirth(request.dateOfBirth);
        ps.getPerson().setSsn(request.ssn);
        ps.getPerson().setGender(request.gender);
        ps.setAddressLine1(request.addressLine1);
        ps.setAddressLine2(request.addressLine2);
        ps.setCity(request.city);
        ps.setRegion(request.region);
        ps.setPostalCode(request.postalCode);
        return ps;
    }

    /**
     * Builds the canonical URI for a person from their identifier of the preferred configured type.
     * Throws IllegalStateException (HTTP 500) if the person lacks such an identifier.
     */
    private URI buildPersonResourceUri(final Person person) {
        for (final Identifier id : person.getIdentifiers()) {
            if (this.preferredPersonIdentifierType.equals(id.getType().getName())) {
                return this.uriInfo.getAbsolutePathBuilder().path(this.preferredPersonIdentifierType)
                        .path(id.getValue()).build();
            }
        }
        //Person MUST have at least one id of the preferred configured type. Results in HTTP 500
        throw new IllegalStateException("The person must have at least one id of the preferred configured type "
                + "which is <" + this.preferredPersonIdentifierType + ">");
    }

    /**
     * Builds the 409-conflict entity: one "person" link per potential match.
     */
    private LinkRepresentation buildLinksToConflictingPeopleFound(List<PersonMatch> matches) {
        //A little defensive stuff. Will result in HTTP 500
        if (matches.isEmpty()) {
            throw new IllegalStateException("Person matches cannot be empty if reconciliation result is <MAYBE>");
        }
        final List<LinkRepresentation.Link> links = new ArrayList<LinkRepresentation.Link>();
        for (final PersonMatch match : matches) {
            links.add(new LinkRepresentation.Link("person", buildPersonResourceUri(match.getPerson()).toString()));
        }
        return new LinkRepresentation(links);
    }

    /**
     * Converts a person's identifiers into response representations.
     * Throws IllegalStateException (HTTP 500) if the set is empty.
     */
    private List<PersonResponseRepresentation.PersonIdentifierRepresentation> buildPersonIdentifierRepresentations(final Set<Identifier> identifiers) {
        final List<PersonResponseRepresentation.PersonIdentifierRepresentation> idsRep =
                new ArrayList<PersonResponseRepresentation.PersonIdentifierRepresentation>();

        for (final Identifier id : identifiers) {
            idsRep.add(new PersonResponseRepresentation.PersonIdentifierRepresentation(id.getType().getName(), id.getValue()));
        }
        if (idsRep.isEmpty()) {
            throw new IllegalStateException("Person identifiers cannot be empty");
        }
        return idsRep;
    }

    //Content-Type: application/x-www-form-urlencoded
    /**
     * Wraps the person's current activation key in a form body ("activationKey" field).
     */
    private Form buildPersonActivationKeyRepresentation(final Person person) {
        Form f = new Form();
        f.putSingle("activationKey", person.getCurrentActivationKey().asString());
        return f;
    }
}
NOJIRA added a bunch of finals that were missing ;-) git-svn-id: 996c6d7d570f9e8d676b69394667d4ecb3e4cdb3@18784 1580c273-15eb-1042-8a87-dc5d815c88a0
openregistry-webapp/src/main/java/org/openregistry/core/web/resources/PeopleResource.java
NOJIRA added a bunch of finals that were missing ;-)