lang
stringclasses
1 value
license
stringclasses
13 values
stderr
stringlengths
0
350
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
7
45.1k
new_contents
stringlengths
0
1.87M
new_file
stringlengths
6
292
old_contents
stringlengths
0
1.87M
message
stringlengths
6
9.26k
old_file
stringlengths
6
292
subject
stringlengths
0
4.45k
Java
epl-1.0
0da6cf7c316d6a02b41312a38e4538adaf34715c
0
css-iter/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,css-iter/cs-studio
package org.csstudio.nams.common.testhelper; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; /** * Marks methods and classes as for testing only * * Could be used by an AnnotationProcessor to ensure the intended use. * * @author Gösta Steen, Tobias Rathjen */ @Retention(RetentionPolicy.SOURCE) public @interface ForTesting { }
applications/plugins/org.csstudio.nams.common/src/org/csstudio/nams/common/testhelper/ForTesting.java
package org.csstudio.nams.common.testhelper; /** * Marks methods and classes as for testing only * * @author Gösta Steen, Tobias Rathjen */ public @interface ForTesting { }
tr: Removed deprecated method in AbstractConjunctionFilterCondition. Added source retention policy in @ForTesting
applications/plugins/org.csstudio.nams.common/src/org/csstudio/nams/common/testhelper/ForTesting.java
tr: Removed deprecated method in AbstractConjunctionFilterCondition. Added source retention policy in @ForTesting
Java
agpl-3.0
7fab975daa352cadd10e4e0044ea026ac138eca4
0
geomajas/geomajas-project-server,geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-client-gwt2,geomajas/geomajas-project-server,geomajas/geomajas-project-client-gwt,geomajas/geomajas-project-client-gwt2,geomajas/geomajas-project-server
/* * This is part of Geomajas, a GIS framework, http://www.geomajas.org/. * * Copyright 2008-2012 Geosparc nv, http://www.geosparc.com/, Belgium. * * The program is available in open source according to the GNU Affero * General Public License. All contributions in this program are covered * by the Geomajas Contributors License Agreement. For full licensing * details, see LICENSE.txt in the project root. */ package org.geomajas.layer.wms.mvc; import org.geomajas.layer.wms.WmsLayer; import org.geomajas.plugin.caching.service.CacheManagerService; import org.geomajas.service.TestRecorder; import org.geomajas.testdata.TestPathBinaryStreamAssert; import org.geomajas.testdata.rule.SecurityRule; import org.junit.After; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import java.io.OutputStream; import java.util.HashMap; import java.util.Map; import static org.fest.assertions.Assertions.assertThat; /** * Test for {@link org.geomajas.layer.wms.mvc.WmsController}. 
* * @author Joachim Van der Auwera */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = {"/org/geomajas/spring/geomajasContext.xml", "/wmsContext.xml", "/org/geomajas/spring/testRecorder.xml", "/org/geomajas/testdata/allowAll.xml"}) public class WmsControllerCacheTest { private static final String IMAGE_CLASS_PATH = "reference"; private static final double DELTA = 1E-6; @Autowired private WmsController wmsController; @Autowired private TestRecorder testRecorder; @Autowired @Rule public SecurityRule securityRule; @Autowired private CacheManagerService cacheManagerService; @Autowired @Qualifier("cachedBlue") private WmsLayer cachedWms; @After public void clearCache() throws Exception { // clear side effects to assure the next test run works cacheManagerService.drop(cachedWms); Thread.sleep(5000); } @Test //@Ignore // test fails as the passivated state is not removed, see CACHE-33 public void testReadCachedImage() throws Exception { MockHttpServletRequest request = new MockHttpServletRequest(); MockHttpServletResponse response = new MockHttpServletResponse(); Map<String, String> parameters = new HashMap<String, String>(); parameters.put("SERVICE", "WMS"); parameters.put("layers", "bluemarble"); parameters.put("WIDTH", "512"); parameters.put("HEIGHT", "512"); parameters.put("bbox", "-52.01245495052001,-28.207099921352835,11.947593278789554,35.75294830795673"); parameters.put("format", "image/jpeg"); parameters.put("version", "1.1.1"); parameters.put("srs", "EPSG:4326"); parameters.put("styles", ""); parameters.put("request", "GetMap"); request.setParameters(parameters); request.setRequestURI("d/wms/cachedBlue/"); request.setQueryString("SERVICE=WMS&layers=bluemarble&" + "WIDTH=512&HEIGHT=512&bbox=-52.01245495052001,-28.207099921352835,11.947593278789554," + "35.75294830795673&format=image/jpeg&version=1.1.1&srs=EPSG:4326&styles=&request=GetMap"); request.setMethod("GET"); testRecorder.clear(); wmsController.getWms(request, response); new 
ImageAssert(response).assertEqualImage("wms.jpg", false, DELTA); assertThat(testRecorder.matches(WmsController.TEST_RECORDER_GROUP, WmsController.TEST_RECORDER_PUT_IN_CACHE)).isEmpty(); testRecorder.clear(); wmsController.getWms(request, response); new ImageAssert(response).assertEqualImage("wms.jpg", false, DELTA); assertThat(testRecorder.matches(WmsController.TEST_RECORDER_GROUP, WmsController.TEST_RECORDER_GET_FROM_CACHE)).isEmpty(); } class ImageAssert extends TestPathBinaryStreamAssert { private MockHttpServletResponse response; public ImageAssert(MockHttpServletResponse response) { super(IMAGE_CLASS_PATH); this.response = response; } public void generateActual(OutputStream out) throws Exception { out.write(response.getContentAsByteArray()); } } }
plugin/geomajas-layer-wms/wms/src/test/java/org/geomajas/layer/wms/mvc/WmsControllerCacheTest.java
/* * This is part of Geomajas, a GIS framework, http://www.geomajas.org/. * * Copyright 2008-2012 Geosparc nv, http://www.geosparc.com/, Belgium. * * The program is available in open source according to the GNU Affero * General Public License. All contributions in this program are covered * by the Geomajas Contributors License Agreement. For full licensing * details, see LICENSE.txt in the project root. */ package org.geomajas.layer.wms.mvc; import org.geomajas.layer.wms.WmsLayer; import org.geomajas.plugin.caching.service.CacheManagerService; import org.geomajas.service.TestRecorder; import org.geomajas.testdata.TestPathBinaryStreamAssert; import org.geomajas.testdata.rule.SecurityRule; import org.junit.After; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.transaction.annotation.Transactional; import java.io.OutputStream; import java.util.HashMap; import java.util.Map; import static org.fest.assertions.Assertions.assertThat; /** * Test for {@link org.geomajas.layer.wms.mvc.WmsController}. 
* * @author Joachim Van der Auwera */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = {"/org/geomajas/spring/geomajasContext.xml", "/wmsContext.xml", "/org/geomajas/spring/testRecorder.xml", "/org/geomajas/testdata/allowAll.xml"}) @Transactional public class WmsControllerCacheTest { private static final String IMAGE_CLASS_PATH = "reference"; private static final double DELTA = 1E-6; @Autowired private WmsController wmsController; @Autowired private TestRecorder testRecorder; @Autowired @Rule public SecurityRule securityRule; @Autowired private CacheManagerService cacheManagerService; @Autowired @Qualifier("cachedBlue") private WmsLayer cachedWms; @After public void clearCache() throws Exception { // clear side effects to assure the next test run works cacheManagerService.drop(cachedWms); Thread.sleep(5000); } @Test @Ignore // test fails as the passivated state is not removed, see CACHE-33 public void testReadCachedImage() throws Exception { MockHttpServletRequest request = new MockHttpServletRequest(); MockHttpServletResponse response = new MockHttpServletResponse(); Map<String, String> parameters = new HashMap<String, String>(); parameters.put("SERVICE", "WMS"); parameters.put("layers", "bluemarble"); parameters.put("WIDTH", "512"); parameters.put("HEIGHT", "512"); parameters.put("bbox", "-52.01245495052001,-28.207099921352835,11.947593278789554,35.75294830795673"); parameters.put("format", "image/jpeg"); parameters.put("version", "1.1.1"); parameters.put("srs", "EPSG:4326"); parameters.put("styles", ""); parameters.put("request", "GetMap"); request.setParameters(parameters); request.setRequestURI("d/wms/cachedBlue/"); request.setQueryString("SERVICE=WMS&layers=bluemarble&" + "WIDTH=512&HEIGHT=512&bbox=-52.01245495052001,-28.207099921352835,11.947593278789554," + "35.75294830795673&format=image/jpeg&version=1.1.1&srs=EPSG:4326&styles=&request=GetMap"); request.setMethod("GET"); testRecorder.clear(); wmsController.getWms(request, 
response); new ImageAssert(response).assertEqualImage("wms.jpg", false, DELTA); assertThat(testRecorder.matches(WmsController.TEST_RECORDER_GROUP, WmsController.TEST_RECORDER_PUT_IN_CACHE)).isEmpty(); testRecorder.clear(); wmsController.getWms(request, response); new ImageAssert(response).assertEqualImage("wms.jpg", false, DELTA); assertThat(testRecorder.matches(WmsController.TEST_RECORDER_GROUP, WmsController.TEST_RECORDER_GET_FROM_CACHE)).isEmpty(); } class ImageAssert extends TestPathBinaryStreamAssert { private MockHttpServletResponse response; public ImageAssert(MockHttpServletResponse response) { super(IMAGE_CLASS_PATH); this.response = response; } public void generateActual(OutputStream out) throws Exception { out.write(response.getContentAsByteArray()); } } }
WMS-29 enable cache wms test (thanks to CACHE-33)
plugin/geomajas-layer-wms/wms/src/test/java/org/geomajas/layer/wms/mvc/WmsControllerCacheTest.java
WMS-29 enable cache wms test (thanks to CACHE-33)
Java
agpl-3.0
b5f0434d0ce5c43827e51db96ba9af81675535b6
0
ow2-proactive/scheduling-portal,ow2-proactive/scheduling-portal,ow2-proactive/scheduling-portal,paraita/scheduling-portal,paraita/scheduling-portal,ShatalovYaroslav/scheduling-portal,paraita/scheduling-portal,ShatalovYaroslav/scheduling-portal,ShatalovYaroslav/scheduling-portal
/* * ProActive Parallel Suite(TM): * The Open Source library for parallel and distributed * Workflows & Scheduling, Orchestration, Cloud Automation * and Big Data Analysis on Enterprise Grids & Clouds. * * Copyright (c) 2007 - 2017 ActiveEon * Contact: [email protected] * * This library is free software: you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation: version 3 of * the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. */ package org.ow2.proactive_grid_cloud_portal.scheduler.client; import java.util.logging.Logger; import org.ow2.proactive_grid_cloud_portal.common.client.AboutWindow; import org.ow2.proactive_grid_cloud_portal.common.client.CredentialsWindow; import org.ow2.proactive_grid_cloud_portal.common.client.Images; import org.ow2.proactive_grid_cloud_portal.common.client.ImagesUnbundled; import org.ow2.proactive_grid_cloud_portal.common.client.Listeners.LogListener; import org.ow2.proactive_grid_cloud_portal.common.client.LogWindow; import org.ow2.proactive_grid_cloud_portal.common.client.ToolButtonsRender; import org.ow2.proactive_grid_cloud_portal.common.client.model.LogModel; import org.ow2.proactive_grid_cloud_portal.common.client.model.LoginModel; import org.ow2.proactive_grid_cloud_portal.common.shared.Config; import org.ow2.proactive_grid_cloud_portal.scheduler.client.SchedulerListeners.ExecutionDisplayModeListener; import 
org.ow2.proactive_grid_cloud_portal.scheduler.client.SchedulerListeners.SchedulerStatusListener; import org.ow2.proactive_grid_cloud_portal.scheduler.client.controller.ExecutionListMode; import org.ow2.proactive_grid_cloud_portal.scheduler.client.model.JobsModel; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.JobInfoView; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.TaskInfoView; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.grid.jobs.JobsDetailColumnsFactory; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.grid.tasks.TaskDetailColumnsFactory; import org.ow2.proactive_grid_cloud_portal.scheduler.shared.SchedulerConfig; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.HorizontalPanel; import com.smartgwt.client.types.Alignment; import com.smartgwt.client.types.Overflow; import com.smartgwt.client.types.Side; import com.smartgwt.client.types.VisibilityMode; import com.smartgwt.client.util.SC; import com.smartgwt.client.widgets.Canvas; import com.smartgwt.client.widgets.Img; import com.smartgwt.client.widgets.Label; import com.smartgwt.client.widgets.grid.HoverCustomizer; import com.smartgwt.client.widgets.grid.ListGridField; import com.smartgwt.client.widgets.grid.ListGridRecord; import com.smartgwt.client.widgets.layout.HLayout; import com.smartgwt.client.widgets.layout.Layout; import com.smartgwt.client.widgets.layout.SectionStack; import com.smartgwt.client.widgets.layout.SectionStackSection; import com.smartgwt.client.widgets.layout.VLayout; import com.smartgwt.client.widgets.menu.Menu; import com.smartgwt.client.widgets.menu.MenuItem; import com.smartgwt.client.widgets.menu.MenuItemSeparator; import com.smartgwt.client.widgets.tab.Tab; import com.smartgwt.client.widgets.tab.TabSet; import com.smartgwt.client.widgets.toolbar.ToolStrip; import 
com.smartgwt.client.widgets.toolbar.ToolStripButton; import com.smartgwt.client.widgets.toolbar.ToolStripMenuButton; /** * Page displayed when the client is logged in * <p> * Contains and displays views for jobs, tasks, etc * * * @author mschnoor */ public class SchedulerPage implements SchedulerStatusListener, LogListener, ExecutionDisplayModeListener { static SchedulerPage inst; protected TabSet leftTabSet; protected Tab tasksTab; protected Layout tasksPane; protected Tab visuTab; protected Canvas visuPane; private static final Logger LOGGER = Logger.getLogger(SchedulerPage.class.getName()); /** Actions on the scheduler */ private static final String START = "Start"; private static final String STOP = "Stop"; private static final String FREEZE = "Freeze"; private static final String RESUME = "Resume"; private static final String PAUSE = "Pause"; private static final String KILL = "Kill"; private static final String SHUTDOWN = "Shutdown"; /** root layout: parent to all widgets of this view */ private Layout rootLayout = null; /** view displaying info about the selected job */ private JobInfoView jobInfo = null; /** view displaying info about the selected task */ private TaskInfoView taskInfo = null; /** displays connected users */ private UsersView usersView = null; /** displays scheduler & accounting stats */ private StatisticsView statsView = null; /** job visualization */ private VisualizationViewSwitcher visuView = null; /** logs for async scheduler callbacks */ private LogWindow logWindow = null; /** about this app */ private AboutWindow aboutWindow = null; /** client settings */ private SettingsWindow settingsWindow = null; private Menu adminMenu = null; /** displayed when critical log events occur */ private ToolStripButton errorButton = null; /** displays the scheduler status */ private Label schedulerStatusLabel = null; private long lastCriticalMessage = 0; private SchedulerController controller = null; // Logo strip properties private int logoStripHeight 
= 40; private String logoStripBackgroundColor = "#fafafa"; private String logoStripBorder = "0px"; private ToolButtonsRender toolButtonsRender = new ToolButtonsRender(); /** * Default constructor * * @param controller Controller that created this page */ public SchedulerPage(SchedulerController controller) { this.controller = controller; buildAndShow(); this.controller.getEventDispatcher().addSchedulerStatusListener(this); LogModel.getInstance().addLogListener(this); this.controller.getExecutionController().getModel().addExecutionsDisplayModeListener(this); // very ugly, only way to control the scroll viewport in VisualizationViewImage.java inst = this; } /** * Creates the layout and adds it to the page * * <pre> * +- content:VLayout --------+ * |+- tools:Widget ---------+| * || # buildTools() || * |+------------------------+| * |+- stack:SectionStack ---+| * ||+- jobSection ---------+|| * |||+- topPane:Layout ---+||| * |||| # buildTopPane() |||| * |||+--------------------+||| * ||+----------------------+|| * ||+- detailsSection -----+|| * |||+- botPane:Layout ---+||| * |||| # buildBotPane() |||| * |||+--------------------+||| * ||+----------------------+|| * |+------------------------+| * +--------------------------+ * </pre> * */ private void buildAndShow() { VLayout contentLayout = new VLayout(); this.rootLayout = contentLayout; contentLayout.setWidth100(); contentLayout.setHeight100(); contentLayout.setBackgroundColor(logoStripBackgroundColor); this.aboutWindow = new AboutWindow(); this.settingsWindow = new SettingsWindow(controller); Canvas tools = buildTools(); HorizontalPanel panel = new HorizontalPanel(); panel.setWidth("100%"); panel.setHeight("3px"); panel.getElement().getStyle().setBackgroundColor("#f47930"); panel.getElement().getStyle().setPadding(-1, Unit.PX); SectionStackSection executionsSections = this.controller.buildExecutionsView(); Layout botPane = buildBotPane(); SectionStackSection detailsSection = new SectionStackSection(); 
detailsSection.setTitle("Details"); detailsSection.setExpanded(true); detailsSection.setItems(botPane); SectionStack stack = new SectionStack(); stack.setWidth100(); stack.setHeight100(); stack.setMargin(2); stack.setVisibilityMode(VisibilityMode.MULTIPLE); stack.setAnimateSections(true); stack.setOverflow(Overflow.HIDDEN); stack.setSections(executionsSections, detailsSection); contentLayout.addMember(buildLogoStrip()); contentLayout.addMember(tools); contentLayout.addMember(panel); contentLayout.addMember(stack); this.logWindow = new LogWindow(controller); this.rootLayout.draw(); } /** admin scheduler functionalities */ private MenuItem schedStartButton, schedStopButton, schedFreezeButton, schedResumeButton, schedPauseButton, schedKillButton, schedShutdownButton; private ToolStrip buildLogoStrip() { final Label schedulerLabel = new Label("ProActive Scheduling & Orchestration"); schedulerLabel.setStyleName("schedulerHeadline"); schedulerLabel.setHeight100(); schedulerLabel.setAutoWidth(); ToolStrip logoPA = new ToolStrip(); logoPA.setHeight(logoStripHeight); logoPA.setWidth("33%"); logoPA.setBackgroundImage(""); logoPA.setBackgroundColor(logoStripBackgroundColor); logoPA.setMargin(0); logoPA.setBorder(logoStripBorder); logoPA.setAlign(Alignment.LEFT); logoPA.addMember(new Img(SchedulerImagesUnbundled.PA_ICON, logoStripHeight, logoStripHeight)); logoPA.addMember(schedulerLabel); ToolStrip additionalLogoCenter = new ToolStrip(); additionalLogoCenter.setHeight(logoStripHeight); additionalLogoCenter.setWidth("33%"); additionalLogoCenter.setBackgroundImage(""); additionalLogoCenter.setBackgroundColor(logoStripBackgroundColor); additionalLogoCenter.setMargin(0); additionalLogoCenter.setBorder(logoStripBorder); additionalLogoCenter.setAlign(Alignment.CENTER); Img logoAzureImg = new Img(SchedulerImagesUnbundled.EXTRA_LOGO_CENTER, 135, logoStripHeight); additionalLogoCenter.addMember(logoAzureImg); ToolStrip logoAE = new ToolStrip(); logoAE.setHeight(logoStripHeight); 
logoAE.setWidth("33%"); logoAE.setBackgroundImage(""); logoAE.setBackgroundColor(logoStripBackgroundColor); logoAE.setMargin(0); logoAE.setBorder(logoStripBorder); logoAE.setAlign(Alignment.RIGHT); Img logoImg = new Img(SchedulerImagesUnbundled.AE_LOGO, 146, logoStripHeight); logoImg.addClickHandler(clickEvent -> Window.open("http://activeeon.com/", "", "")); logoAE.addMember(logoImg); ToolStrip logoStrip = new ToolStrip(); logoStrip.setStyleName("paddingLeftAndRight"); logoStrip.setHeight(logoStripHeight); logoStrip.setWidth100(); logoStrip.setBackgroundImage(""); logoStrip.setBackgroundColor(logoStripBackgroundColor); logoStrip.setBorder(logoStripBorder); logoStrip.setMargin(0); logoStrip.addMember(logoPA); logoStrip.addMember(additionalLogoCenter); logoStrip.addMember(logoAE); return logoStrip; } /** * Builds and returns the toolbar * * <pre> * +- ToolStrip --------------------------------------------------------------+ * |+- Portal v -++- Admin v -++- Help v -+|+ Submit ++ Logout + +- Img ---+| * || Submit || Start || Logs ||+--------++--------+ | PA logo || * || Settings || Stop || About | +---------+| * +| Credentials|| Freeze |+----------+-----------------------------------+ * | Logout || Pause | * +------------+| Resume | * | Kill | * +-----------+ * </pre> */ @SuppressWarnings("squid:S3776") private ToolStrip buildTools() { ToolStrip tools = new ToolStrip(); tools.setHeight(50); tools.setWidth100(); tools.setBackgroundImage(""); tools.setBackgroundColor(logoStripBackgroundColor); tools.setBorder("0px"); MenuItem submitMenuItem = new MenuItem("Submit job", SchedulerImages.instance.job_submit_16().getSafeUri().asString()); submitMenuItem.addClickHandler(event -> new SubmitWindow().show()); MenuItem flatSubmitMenuItem = new MenuItem("Submit command file", SchedulerImages.instance.script_16().getSafeUri().asString()); flatSubmitMenuItem.addClickHandler(event -> new FlatSubmitWindow(SchedulerPage.this.controller).show()); MenuItem settingsMenuItem = new 
MenuItem("Settings", Images.instance.settings_16().getSafeUri().asString()); settingsMenuItem.addClickHandler(event -> SchedulerPage.this.settingsWindow.show()); MenuItem credMenuItem = new MenuItem("Create credentials", Images.instance.key_16().getSafeUri().asString()); credMenuItem.addClickHandler(event -> new CredentialsWindow().show()); MenuItem thirdPartyCredentialsMenuItem = new MenuItem("Manage third-party credentials", Images.instance.key_16().getSafeUri().asString()); thirdPartyCredentialsMenuItem.addClickHandler(event -> new ThirdPartyCredentialsWindow(SchedulerPage.this.controller).show()); MenuItem serversMenuItem = new MenuItem("Data servers", Images.instance.server_16().getSafeUri().asString()); serversMenuItem.addClickHandler(event -> { String url = GWT.getModuleBaseURL() + "servers?codebase=" + GWT.getHostPageBaseURL(); Window.open(url, "_blank", ""); }); MenuItem logoutMenuItem = new MenuItem("Logout", Images.instance.exit_18().getSafeUri().asString()); logoutMenuItem.addClickHandler(event -> SC.confirm("Logout", "Are you sure you want to exit?", value -> { if (value) { SchedulerPage.this.controller.logout(); } })); ToolStripMenuButton portalMenuButton = new ToolStripMenuButton("Portal"); Menu portalMenu = new Menu(); portalMenu.setItems(submitMenuItem, flatSubmitMenuItem, new MenuItemSeparator(), credMenuItem, thirdPartyCredentialsMenuItem, serversMenuItem, settingsMenuItem, new MenuItemSeparator(), logoutMenuItem); portalMenuButton.setMenu(portalMenu); MenuItem logMenuItem = new MenuItem("Display logs", Images.instance.log_16().getSafeUri().asString()); logMenuItem.addClickHandler(event -> { SchedulerPage.this.logWindow.show(); errorButton.hide(); }); MenuItem documentationMenuItem = new MenuItem("Documentation", Images.instance.icon_manual().getSafeUri().asString()); documentationMenuItem.addClickHandler(event -> { String docVersion = Config.get().getVersion().contains("SNAPSHOT") ? 
"dev" : Config.get().getVersion(); Window.open("http://doc.activeeon.com/" + docVersion, "", ""); }); MenuItem aboutMenuItem = new MenuItem("About", Images.instance.about_16().getSafeUri().asString()); aboutMenuItem.addClickHandler(event -> SchedulerPage.this.aboutWindow.show()); ToolStripMenuButton helpMenuButton = new ToolStripMenuButton("Help"); Menu helpMenu = new Menu(); helpMenu.setItems(logMenuItem, documentationMenuItem, aboutMenuItem); helpMenuButton.setMenu(helpMenu); ToolStripButton submitButton = new ToolStripButton("Submit job"); submitButton.setIcon(SchedulerImages.instance.job_submit_16().getSafeUri().asString()); submitButton.setIconSize(20); submitButton.setTooltip("Submit a new job"); submitButton.addClickHandler(event -> new SubmitWindow().show()); ToolStripButton planButton = new ToolStripButton("Plan job"); planButton.setIcon(SchedulerImages.instance.job_plan_16().getSafeUri().asString()); planButton.setIconSize(20); planButton.setTooltip("Plan a job"); planButton.addClickHandler(event -> new PlanWindow(SchedulerPage.this.controller).show()); schedStartButton = new MenuItem(START); schedStartButton.setIcon(SchedulerImages.instance.scheduler_start_16().getSafeUri().asString()); schedStartButton.addClickHandler(event -> SchedulerPage.this.controller.startScheduler()); schedStopButton = new MenuItem(STOP); schedStopButton.setIcon(SchedulerImages.instance.scheduler_stop_16().getSafeUri().asString()); schedStopButton.addClickHandler(event -> SchedulerPage.this.controller.stopScheduler()); schedFreezeButton = new MenuItem(FREEZE); schedFreezeButton.setIcon(SchedulerImages.instance.scheduler_freeze_16().getSafeUri().asString()); schedFreezeButton.addClickHandler(event -> SchedulerPage.this.controller.freezeScheduler()); schedResumeButton = new MenuItem(RESUME); schedResumeButton.setIcon(SchedulerImages.instance.scheduler_resume_16().getSafeUri().asString()); schedResumeButton.addClickHandler(event -> SchedulerPage.this.controller.resumeScheduler()); 
schedPauseButton = new MenuItem(PAUSE); schedPauseButton.setIcon(SchedulerImages.instance.scheduler_pause_16().getSafeUri().asString()); schedPauseButton.addClickHandler(event -> SchedulerPage.this.controller.pauseScheduler()); schedKillButton = new MenuItem(KILL); schedKillButton.setIcon(SchedulerImages.instance.scheduler_kill_16().getSafeUri().asString()); schedKillButton.addClickHandler(event -> SC.confirm("Do you really want to <strong>kill</strong> the Scheduler?", value -> { if (value) SchedulerPage.this.controller.killScheduler(); })); schedShutdownButton = new MenuItem(SHUTDOWN); schedShutdownButton.setIcon(SchedulerImages.instance.scheduler_shutdown_16().getSafeUri().asString()); schedShutdownButton.addClickHandler(event -> SC.confirm("Do you really want to <strong>shutdown</strong> the Scheduler?", value -> { if (value) SchedulerPage.this.controller.shutdownScheduler(); })); ToolStripMenuButton adminMenuButton = new ToolStripMenuButton("Admin"); this.adminMenu = new Menu(); this.adminMenu.setItems(schedStartButton, schedStopButton, schedFreezeButton, schedResumeButton, schedPauseButton, schedKillButton, schedShutdownButton); // Adding tooltips on Admin actions ListGridField titleFieldDefaults = adminMenu.getTitleFieldDefaults(); titleFieldDefaults.setShowHover(true); titleFieldDefaults.setHoverCustomizer(new HoverCustomizer() { @Override public String hoverHTML(Object value, ListGridRecord record, int rowNum, int colNum) { if (value.toString().equalsIgnoreCase(START)) return "Start Scheduler Server from Stopped status"; else if (value.toString().equalsIgnoreCase(STOP)) return "Stop Scheduler Server (Submitted Jobs terminate)"; else if (value.toString().equalsIgnoreCase(FREEZE)) return "Freeze Scheduler Server (Running Tasks terminate)"; else if (value.toString().equalsIgnoreCase(RESUME)) return "Resume Scheduler Server from Paused or Frozen status"; else if (value.toString().equalsIgnoreCase(PAUSE)) return "Pause Scheduler Server (Running Jobs 
terminate)"; else if (value.toString().equalsIgnoreCase(KILL)) return "Kill Scheduler Server"; else if (value.toString().equalsIgnoreCase(SHUTDOWN)) return "Shutdown Scheduler Server (Running Tasks terminate)"; return null; } }); adminMenuButton.setMenu(adminMenu); String login = LoginModel.getInstance().getLogin(); if (login != null) login = " <b>" + login + "</b>"; else login = ""; errorButton = new ToolStripButton("<strong>Network error</strong>", Images.instance.net_error_16().getSafeUri().asString()); errorButton.setBackgroundColor("#ffbbbb"); errorButton.addClickHandler(event -> { SchedulerPage.this.logWindow.show(); errorButton.hide(); }); errorButton.hide(); schedulerStatusLabel = new Label(SchedulerStatus.STARTED.name()); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_start_16().getSafeUri().asString()); schedulerStatusLabel.setIconSize(20); schedulerStatusLabel.setSize("105%", "105%"); HLayout schedulerStatusLabelLayout = new HLayout(); schedulerStatusLabelLayout.addMember(schedulerStatusLabel); ToolStripButton resourceManagerLinkButton = toolButtonsRender.getResourceManagerLinkButton(); ToolStripButton studioLinkButton = toolButtonsRender.getStudioLinkButton(); ToolStripButton schedulerLinkButton = toolButtonsRender.getSchedulerHighlightedLinkButton(); ToolStripButton automationDashboardLinkButton = toolButtonsRender.getAutomationDashboardLinkButton(); ToolStripButton logoutButton = toolButtonsRender.getLogoutButton(login, SchedulerPage.this.controller); tools.addMenuButton(portalMenuButton); tools.addMenuButton(adminMenuButton); tools.addMenuButton(helpMenuButton); tools.addSeparator(); tools.addButton(submitButton); tools.addSeparator(); tools.addButton(planButton); tools.addSeparator(); tools.addButton(errorButton); tools.addFill(); tools.addMember(schedulerStatusLabelLayout); tools.addFill(); tools.addButton(automationDashboardLinkButton); tools.addSpacer(12); tools.addButton(studioLinkButton); tools.addSpacer(12); 
tools.addButton(schedulerLinkButton); tools.addSpacer(12); tools.addButton(resourceManagerLinkButton); tools.addSpacer(2); tools.addSeparator(); tools.addSpacer(2); tools.addButton(logoutButton); tools.addSpacer(10); // disable all controls at first, next event will sort it out this.statusChanged(SchedulerStatus.KILLED); return tools; } /* * (non-Javadoc) * * @see * org.ow2.proactive_grid_cloud_portal.client.Listeners.SchedulerStatusListener#statusChanged( * org.ow2.proactive_grid_cloud_portal.shared.SchedulerStatus) */ public void statusChanged(SchedulerStatus status) { // this only changes the enable status of scheduler admin buttons switch (status) { case SHUTTING_DOWN: schedStartButton.setEnabled(false); schedStopButton.setEnabled(false); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(false); schedShutdownButton.setEnabled(false); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_shutdown_16().getSafeUri().asString()); case KILLED: schedStartButton.setEnabled(false); schedStopButton.setEnabled(false); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(false); schedShutdownButton.setEnabled(false); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_kill_16().getSafeUri().asString()); break; case FROZEN: schedStartButton.setEnabled(false); schedStopButton.setEnabled(true); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(true); schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_freeze_16().getSafeUri().asString()); break; case PAUSED: schedStartButton.setEnabled(false); schedStopButton.setEnabled(true); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(true); 
schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_pause_16().getSafeUri().asString()); break; case STARTED: case UNLINKED: schedStartButton.setEnabled(false); schedStopButton.setEnabled(true); schedFreezeButton.setEnabled(true); schedPauseButton.setEnabled(true); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_start_16().getSafeUri().asString()); break; case STOPPED: schedStartButton.setEnabled(true); schedStopButton.setEnabled(false); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_stop_16().getSafeUri().asString()); break; default: LOGGER.warning("Unexpected scheduler status"); break; } // Update the scheduler status label schedulerStatusLabel.setContents("Status:" + status.name()); this.adminMenu.redraw(); } /** * Builds and returns the bottom pane: currently selected job somewhat in a master/detail fashion * * <pre> * +- layout:HLayout ----+ * |+- leftTabs:TabSet -+| * ||+- output:Tab ----+|| * ||| sel. job output ||| * ||+-----------------+|| * ||+- tasks:Tab -----+|| * ||| sel. job tasks ||| * ||+-----------------+|| * ||+- users:Tab -----+|| | left * ||| connected users ||| | * ||+-----------------+|| | the actual widget is horizontal * |+-------------------+| | * |+- rightTabs:TabSet +| V right * ||+- infoTab:Tab ---+|| * ||| sel. 
job info ||| * ||+-----------------+|| * ||+- filters:Tab ---+|| * ||| jobgrid filters ||| * ||+-----------------+|| * |+-------------------+| * +---------------------+ * </pre> * */ private Layout buildBotPane() { leftTabSet = new TabSet(); leftTabSet.setWidth("50%"); leftTabSet.setHeight100(); leftTabSet.setTabBarPosition(Side.TOP); leftTabSet.setShowResizeBar(true); tasksPane = this.controller.buildTaskView(); this.buildTasksTab(); this.visuView = new VisualizationViewSwitcher(this.controller); this.visuPane = this.visuView.build(); this.buildVisuTab(); final Tab usersTab = new Tab("Users Sessions", Images.instance.user_16().getSafeUri().asString()); this.usersView = new UsersView(this.controller); usersTab.setPane(this.usersView.build()); final Tab statsTab = new Tab("Statistics", Images.instance.stats_16().getSafeUri().asString()); this.statsView = new StatisticsView(this.controller); statsTab.setPane(this.statsView.build()); final Tab usageTab = new Tab("Usage", SchedulerImages.instance.usage_16().getSafeUri().asString()); usageTab.setPane(new UsageView(this.controller).build()); leftTabSet.addTab(tasksTab); leftTabSet.addTab(visuTab); leftTabSet.addTab(usersTab); leftTabSet.addTab(statsTab); leftTabSet.addTab(usageTab); leftTabSet.addTabSelectedHandler(event -> { if (leftTabSet.getSelectedTab().equals(tasksTab)) { controller.setLazyStatsFetch(true); controller.setLazyUserFetch(true); } else if (leftTabSet.getSelectedTab().equals(usersTab)) { controller.setLazyStatsFetch(true); controller.setLazyUserFetch(false); } else if (leftTabSet.getSelectedTab().equals(statsTab)) { controller.setLazyStatsFetch(false); controller.setLazyUserFetch(true); } if (leftTabSet.getSelectedTab().equals(visuTab)) { controller.setVisuFetchEnabled(true); JobsModel jobsModel = ((SchedulerModelImpl) controller.getModel()).getExecutionsModel().getJobsModel(); if (jobsModel.getSelectedJob() != null) { controller.visuFetch(jobsModel.getSelectedJob().getId().toString()); } } else { 
controller.setVisuFetchEnabled(false); } }); TabSet rightTabSet = new TabSet(); rightTabSet.setWidth("50%"); rightTabSet.setHeight100(); rightTabSet.setTabBarPosition(Side.TOP); Tab jobinfoTab = new Tab("Job Info", SchedulerImages.instance.info_16().getSafeUri().asString()); this.jobInfo = new JobInfoView(this.controller, new JobsDetailColumnsFactory()); jobinfoTab.setPane(this.jobInfo.build()); Tab taskinfoTab = new Tab("Task Info", SchedulerImages.instance.info_16().getSafeUri().asString()); this.taskInfo = new TaskInfoView(this.controller, new TaskDetailColumnsFactory()); taskinfoTab.setPane(this.taskInfo.build()); Tab outputTab = new Tab("Output", SchedulerImages.instance.output_16().getSafeUri().asString()); outputTab.setPane(this.controller.buildOutputView()); Tab serverLogsTab = new Tab("Server Logs", SchedulerImages.instance.output_16().getSafeUri().asString()); serverLogsTab.setPane(this.controller.buildServerLogsView()); Tab resultTab = new Tab("Preview", Images.instance.search_16().getSafeUri().asString()); resultTab.setPane(this.controller.buildPreviewView()); rightTabSet.addTab(jobinfoTab); rightTabSet.addTab(taskinfoTab); rightTabSet.addTab(outputTab); rightTabSet.addTab(serverLogsTab); rightTabSet.addTab(resultTab); HLayout layout = new HLayout(); layout.addMember(leftTabSet); layout.addMember(rightTabSet); return layout; } /** * Removes the layout and widgets from the page * Call this when the view should be definitely removed and GC's, else just hide() it */ public void destroy() { this.rootLayout.destroy(); this.logWindow.destroy(); this.aboutWindow.destroy(); this.settingsWindow.destroy(); this.rootLayout = null; this.jobInfo = null; this.controller = null; this.logWindow = null; this.aboutWindow = null; } @Override public void logMessage(String message) { long dt = System.currentTimeMillis() - this.lastCriticalMessage; if (dt > SchedulerConfig.get().getClientRefreshTime() * 4) { this.errorButton.hide(); } } @Override public void 
logImportantMessage(String message) { long dt = System.currentTimeMillis() - this.lastCriticalMessage; if (dt > SchedulerConfig.get().getClientRefreshTime() * 4) { this.errorButton.hide(); } } @Override public void logCriticalMessage(String message) { this.lastCriticalMessage = System.currentTimeMillis(); this.errorButton.show(); } @Override public void modeSwitched(ExecutionListMode mode) { switch (mode) { case JOB_CENTRIC: this.buildTasksTab(); this.buildVisuTab(); leftTabSet.addTab(this.tasksTab, 0); leftTabSet.addTab(this.visuTab, 1); break; case TASK_CENTRIC: leftTabSet.updateTab(tasksTab, null); leftTabSet.removeTab(tasksTab); leftTabSet.updateTab(visuTab, null); leftTabSet.removeTab(visuTab); break; default: LOGGER.warning("Unexpected mode"); break; } leftTabSet.markForRedraw(); } protected void buildTasksTab() { tasksTab = new Tab("Tasks", SchedulerImages.instance.monitoring_16().getSafeUri().asString()); tasksTab.setPane(tasksPane); } protected void buildVisuTab() { visuTab = new Tab("Visualization", ImagesUnbundled.PA_16); visuTab.setPane(this.visuPane); } }
scheduler-portal/src/main/java/org/ow2/proactive_grid_cloud_portal/scheduler/client/SchedulerPage.java
/* * ProActive Parallel Suite(TM): * The Open Source library for parallel and distributed * Workflows & Scheduling, Orchestration, Cloud Automation * and Big Data Analysis on Enterprise Grids & Clouds. * * Copyright (c) 2007 - 2017 ActiveEon * Contact: [email protected] * * This library is free software: you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation: version 3 of * the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. */ package org.ow2.proactive_grid_cloud_portal.scheduler.client; import java.util.logging.Logger; import org.ow2.proactive_grid_cloud_portal.common.client.AboutWindow; import org.ow2.proactive_grid_cloud_portal.common.client.CredentialsWindow; import org.ow2.proactive_grid_cloud_portal.common.client.Images; import org.ow2.proactive_grid_cloud_portal.common.client.ImagesUnbundled; import org.ow2.proactive_grid_cloud_portal.common.client.Listeners.LogListener; import org.ow2.proactive_grid_cloud_portal.common.client.LogWindow; import org.ow2.proactive_grid_cloud_portal.common.client.ToolButtonsRender; import org.ow2.proactive_grid_cloud_portal.common.client.model.LogModel; import org.ow2.proactive_grid_cloud_portal.common.client.model.LoginModel; import org.ow2.proactive_grid_cloud_portal.common.shared.Config; import org.ow2.proactive_grid_cloud_portal.scheduler.client.SchedulerListeners.ExecutionDisplayModeListener; import 
org.ow2.proactive_grid_cloud_portal.scheduler.client.SchedulerListeners.SchedulerStatusListener; import org.ow2.proactive_grid_cloud_portal.scheduler.client.controller.ExecutionListMode; import org.ow2.proactive_grid_cloud_portal.scheduler.client.model.JobsModel; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.JobInfoView; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.TaskInfoView; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.grid.jobs.JobsDetailColumnsFactory; import org.ow2.proactive_grid_cloud_portal.scheduler.client.view.grid.tasks.TaskDetailColumnsFactory; import org.ow2.proactive_grid_cloud_portal.scheduler.shared.SchedulerConfig; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.HorizontalPanel; import com.smartgwt.client.types.Alignment; import com.smartgwt.client.types.Overflow; import com.smartgwt.client.types.Side; import com.smartgwt.client.types.VisibilityMode; import com.smartgwt.client.util.SC; import com.smartgwt.client.widgets.Canvas; import com.smartgwt.client.widgets.Img; import com.smartgwt.client.widgets.Label; import com.smartgwt.client.widgets.grid.HoverCustomizer; import com.smartgwt.client.widgets.grid.ListGridField; import com.smartgwt.client.widgets.grid.ListGridRecord; import com.smartgwt.client.widgets.layout.HLayout; import com.smartgwt.client.widgets.layout.Layout; import com.smartgwt.client.widgets.layout.SectionStack; import com.smartgwt.client.widgets.layout.SectionStackSection; import com.smartgwt.client.widgets.layout.VLayout; import com.smartgwt.client.widgets.menu.Menu; import com.smartgwt.client.widgets.menu.MenuItem; import com.smartgwt.client.widgets.menu.MenuItemSeparator; import com.smartgwt.client.widgets.tab.Tab; import com.smartgwt.client.widgets.tab.TabSet; import com.smartgwt.client.widgets.toolbar.ToolStrip; import 
com.smartgwt.client.widgets.toolbar.ToolStripButton; import com.smartgwt.client.widgets.toolbar.ToolStripMenuButton; /** * Page displayed when the client is logged in * <p> * Contains and displays views for jobs, tasks, etc * * * @author mschnoor */ public class SchedulerPage implements SchedulerStatusListener, LogListener, ExecutionDisplayModeListener { static SchedulerPage inst; protected TabSet leftTabSet; protected Tab tasksTab; protected Layout tasksPane; protected Tab visuTab; protected Canvas visuPane; private static final Logger LOGGER = Logger.getLogger(SchedulerPage.class.getName()); /** Actions on the scheduler */ private static final String START = "Start"; private static final String STOP = "Stop"; private static final String FREEZE = "Freeze"; private static final String RESUME = "Resume"; private static final String PAUSE = "Pause"; private static final String KILL = "Kill"; private static final String SHUTDOWN = "Shutdown"; /** root layout: parent to all widgets of this view */ private Layout rootLayout = null; /** view displaying info about the selected job */ private JobInfoView jobInfo = null; /** view displaying info about the selected task */ private TaskInfoView taskInfo = null; /** displays connected users */ private UsersView usersView = null; /** displays scheduler & accounting stats */ private StatisticsView statsView = null; /** job visualization */ private VisualizationViewSwitcher visuView = null; /** logs for async scheduler callbacks */ private LogWindow logWindow = null; /** about this app */ private AboutWindow aboutWindow = null; /** client settings */ private SettingsWindow settingsWindow = null; private Menu adminMenu = null; /** displayed when critical log events occur */ private ToolStripButton errorButton = null; /** displays the scheduler status */ private Label schedulerStatusLabel = null; private long lastCriticalMessage = 0; private SchedulerController controller = null; // Logo strip properties private int logoStripHeight 
= 40; private String logoStripBackgroundColor = "#fafafa"; private String logoStripBorder = "0px"; private ToolButtonsRender toolButtonsRender = new ToolButtonsRender(); /** * Default constructor * * @param controller Controller that created this page */ public SchedulerPage(SchedulerController controller) { this.controller = controller; buildAndShow(); this.controller.getEventDispatcher().addSchedulerStatusListener(this); LogModel.getInstance().addLogListener(this); this.controller.getExecutionController().getModel().addExecutionsDisplayModeListener(this); // very ugly, only way to control the scroll viewport in VisualizationViewImage.java inst = this; } /** * Creates the layout and adds it to the page * * <pre> * +- content:VLayout --------+ * |+- tools:Widget ---------+| * || # buildTools() || * |+------------------------+| * |+- stack:SectionStack ---+| * ||+- jobSection ---------+|| * |||+- topPane:Layout ---+||| * |||| # buildTopPane() |||| * |||+--------------------+||| * ||+----------------------+|| * ||+- detailsSection -----+|| * |||+- botPane:Layout ---+||| * |||| # buildBotPane() |||| * |||+--------------------+||| * ||+----------------------+|| * |+------------------------+| * +--------------------------+ * </pre> * */ private void buildAndShow() { VLayout contentLayout = new VLayout(); this.rootLayout = contentLayout; contentLayout.setWidth100(); contentLayout.setHeight100(); contentLayout.setBackgroundColor(logoStripBackgroundColor); this.aboutWindow = new AboutWindow(); this.settingsWindow = new SettingsWindow(controller); Canvas tools = buildTools(); HorizontalPanel panel = new HorizontalPanel(); panel.setWidth("100%"); panel.setHeight("3px"); panel.getElement().getStyle().setBackgroundColor("#f47930"); panel.getElement().getStyle().setPadding(-1, Unit.PX); SectionStackSection executionsSections = this.controller.buildExecutionsView(); Layout botPane = buildBotPane(); SectionStackSection detailsSection = new SectionStackSection(); 
detailsSection.setTitle("Details"); detailsSection.setExpanded(true); detailsSection.setItems(botPane); SectionStack stack = new SectionStack(); stack.setWidth100(); stack.setHeight100(); stack.setMargin(2); stack.setVisibilityMode(VisibilityMode.MULTIPLE); stack.setAnimateSections(true); stack.setOverflow(Overflow.HIDDEN); stack.setSections(executionsSections, detailsSection); contentLayout.addMember(buildLogoStrip()); contentLayout.addMember(tools); contentLayout.addMember(panel); contentLayout.addMember(stack); this.logWindow = new LogWindow(controller); this.rootLayout.draw(); } /** admin scheduler functionalities */ private MenuItem schedStartButton, schedStopButton, schedFreezeButton, schedResumeButton, schedPauseButton, schedKillButton, schedShutdownButton; private ToolStrip buildLogoStrip() { final Label schedulerLabel = new Label("ProActive Scheduling & Orchestration"); schedulerLabel.setStyleName("schedulerHeadline"); schedulerLabel.setHeight100(); schedulerLabel.setAutoWidth(); ToolStrip logoPA = new ToolStrip(); logoPA.setHeight(logoStripHeight); logoPA.setWidth("33%"); logoPA.setBackgroundImage(""); logoPA.setBackgroundColor(logoStripBackgroundColor); logoPA.setMargin(0); logoPA.setBorder(logoStripBorder); logoPA.setAlign(Alignment.LEFT); logoPA.addMember(new Img(SchedulerImagesUnbundled.PA_ICON, logoStripHeight, logoStripHeight)); logoPA.addMember(schedulerLabel); ToolStrip additionalLogoCenter = new ToolStrip(); additionalLogoCenter.setHeight(logoStripHeight); additionalLogoCenter.setWidth("33%"); additionalLogoCenter.setBackgroundImage(""); additionalLogoCenter.setBackgroundColor(logoStripBackgroundColor); additionalLogoCenter.setMargin(0); additionalLogoCenter.setBorder(logoStripBorder); additionalLogoCenter.setAlign(Alignment.CENTER); Img logoAzureImg = new Img(SchedulerImagesUnbundled.EXTRA_LOGO_CENTER, 135, logoStripHeight); additionalLogoCenter.addMember(logoAzureImg); ToolStrip logoAE = new ToolStrip(); logoAE.setHeight(logoStripHeight); 
logoAE.setWidth("33%"); logoAE.setBackgroundImage(""); logoAE.setBackgroundColor(logoStripBackgroundColor); logoAE.setMargin(0); logoAE.setBorder(logoStripBorder); logoAE.setAlign(Alignment.RIGHT); Img logoImg = new Img(SchedulerImagesUnbundled.AE_LOGO, 146, logoStripHeight); logoImg.addClickHandler(clickEvent -> Window.open("http://activeeon.com/", "", "")); logoAE.addMember(logoImg); ToolStrip logoStrip = new ToolStrip(); logoStrip.setStyleName("paddingLeftAndRight"); logoStrip.setHeight(logoStripHeight); logoStrip.setWidth100(); logoStrip.setBackgroundImage(""); logoStrip.setBackgroundColor(logoStripBackgroundColor); logoStrip.setBorder(logoStripBorder); logoStrip.setMargin(0); logoStrip.addMember(logoPA); logoStrip.addMember(additionalLogoCenter); logoStrip.addMember(logoAE); return logoStrip; } /** * Builds and returns the toolbar * * <pre> * +- ToolStrip --------------------------------------------------------------+ * |+- Portal v -++- Admin v -++- Help v -+|+ Submit ++ Logout + +- Img ---+| * || Submit || Start || Logs ||+--------++--------+ | PA logo || * || Settings || Stop || About | +---------+| * +| Credentials|| Freeze |+----------+-----------------------------------+ * | Logout || Pause | * +------------+| Resume | * | Kill | * +-----------+ * </pre> */ @SuppressWarnings("squid:S3776") private ToolStrip buildTools() { ToolStrip tools = new ToolStrip(); tools.setHeight(50); tools.setWidth100(); tools.setBackgroundImage(""); tools.setBackgroundColor(logoStripBackgroundColor); tools.setBorder("0px"); MenuItem submitMenuItem = new MenuItem("Submit job", SchedulerImages.instance.job_submit_16().getSafeUri().asString()); submitMenuItem.addClickHandler(event -> new SubmitWindow().show()); MenuItem flatSubmitMenuItem = new MenuItem("Submit command file", SchedulerImages.instance.script_16().getSafeUri().asString()); flatSubmitMenuItem.addClickHandler(event -> new FlatSubmitWindow(SchedulerPage.this.controller).show()); MenuItem settingsMenuItem = new 
MenuItem("Settings", Images.instance.settings_16().getSafeUri().asString()); settingsMenuItem.addClickHandler(event -> SchedulerPage.this.settingsWindow.show()); MenuItem credMenuItem = new MenuItem("Create credentials", Images.instance.key_16().getSafeUri().asString()); credMenuItem.addClickHandler(event -> new CredentialsWindow().show()); MenuItem thirdPartyCredentialsMenuItem = new MenuItem("Manage third-party credentials", Images.instance.key_16().getSafeUri().asString()); thirdPartyCredentialsMenuItem.addClickHandler(event -> new ThirdPartyCredentialsWindow(SchedulerPage.this.controller).show()); MenuItem serversMenuItem = new MenuItem("Data servers", Images.instance.server_16().getSafeUri().asString()); serversMenuItem.addClickHandler(event -> { String url = GWT.getModuleBaseURL() + "servers?codebase=" + GWT.getHostPageBaseURL(); Window.open(url, "_blank", ""); }); MenuItem logoutMenuItem = new MenuItem("Logout", Images.instance.exit_18().getSafeUri().asString()); logoutMenuItem.addClickHandler(event -> SC.confirm("Logout", "Are you sure you want to exit?", value -> { if (value) { SchedulerPage.this.controller.logout(); } })); ToolStripMenuButton portalMenuButton = new ToolStripMenuButton("Portal"); Menu portalMenu = new Menu(); portalMenu.setItems(submitMenuItem, flatSubmitMenuItem, new MenuItemSeparator(), credMenuItem, thirdPartyCredentialsMenuItem, serversMenuItem, settingsMenuItem, new MenuItemSeparator(), logoutMenuItem); portalMenuButton.setMenu(portalMenu); MenuItem logMenuItem = new MenuItem("Display logs", Images.instance.log_16().getSafeUri().asString()); logMenuItem.addClickHandler(event -> { SchedulerPage.this.logWindow.show(); errorButton.hide(); }); MenuItem documentationMenuItem = new MenuItem("Documentation", Images.instance.icon_manual().getSafeUri().asString()); documentationMenuItem.addClickHandler(event -> { String docVersion = Config.get().getVersion().contains("SNAPSHOT") ? 
"dev" : Config.get().getVersion(); Window.open("http://doc.activeeon.com/" + docVersion, "", ""); }); MenuItem aboutMenuItem = new MenuItem("About", Images.instance.about_16().getSafeUri().asString()); aboutMenuItem.addClickHandler(event -> SchedulerPage.this.aboutWindow.show()); ToolStripMenuButton helpMenuButton = new ToolStripMenuButton("Help"); Menu helpMenu = new Menu(); helpMenu.setItems(logMenuItem, documentationMenuItem, aboutMenuItem); helpMenuButton.setMenu(helpMenu); ToolStripButton submitButton = new ToolStripButton("Submit job"); submitButton.setIcon(SchedulerImages.instance.job_submit_16().getSafeUri().asString()); submitButton.setIconSize(20); submitButton.setTooltip("Submit a new job"); submitButton.addClickHandler(event -> new SubmitWindow().show()); ToolStripButton planButton = new ToolStripButton("Plan job"); planButton.setIcon(SchedulerImages.instance.job_plan_16().getSafeUri().asString()); planButton.setIconSize(20); planButton.setTooltip("Plan a job"); planButton.addClickHandler(event -> new PlanWindow(SchedulerPage.this.controller).show()); schedStartButton = new MenuItem(START); schedStartButton.setIcon(SchedulerImages.instance.scheduler_start_16().getSafeUri().asString()); schedStartButton.addClickHandler(event -> SchedulerPage.this.controller.startScheduler()); schedStopButton = new MenuItem(STOP); schedStopButton.setIcon(SchedulerImages.instance.scheduler_stop_16().getSafeUri().asString()); schedStopButton.addClickHandler(event -> SchedulerPage.this.controller.stopScheduler()); schedFreezeButton = new MenuItem(FREEZE); schedFreezeButton.setIcon(SchedulerImages.instance.scheduler_freeze_16().getSafeUri().asString()); schedFreezeButton.addClickHandler(event -> SchedulerPage.this.controller.freezeScheduler()); schedResumeButton = new MenuItem(RESUME); schedResumeButton.setIcon(SchedulerImages.instance.scheduler_resume_16().getSafeUri().asString()); schedResumeButton.addClickHandler(event -> SchedulerPage.this.controller.resumeScheduler()); 
schedPauseButton = new MenuItem(PAUSE); schedPauseButton.setIcon(SchedulerImages.instance.scheduler_pause_16().getSafeUri().asString()); schedPauseButton.addClickHandler(event -> SchedulerPage.this.controller.pauseScheduler()); schedKillButton = new MenuItem(KILL); schedKillButton.setIcon(SchedulerImages.instance.scheduler_kill_16().getSafeUri().asString()); schedKillButton.addClickHandler(event -> SC.confirm("Do you really want to <strong>kill</strong> the Scheduler?", value -> { if (value) SchedulerPage.this.controller.killScheduler(); })); schedShutdownButton = new MenuItem(SHUTDOWN); schedShutdownButton.setIcon(SchedulerImages.instance.scheduler_shutdown_16().getSafeUri().asString()); schedShutdownButton.addClickHandler(event -> SC.confirm("Do you really want to <strong>shutdown</strong> the Scheduler?", value -> { if (value) SchedulerPage.this.controller.shutdownScheduler(); })); ToolStripMenuButton adminMenuButton = new ToolStripMenuButton("Admin"); this.adminMenu = new Menu(); this.adminMenu.setItems(schedStartButton, schedStopButton, schedFreezeButton, schedResumeButton, schedPauseButton, schedKillButton, schedShutdownButton); // Adding tooltips on Admin actions ListGridField titleFieldDefaults = adminMenu.getTitleFieldDefaults(); titleFieldDefaults.setShowHover(true); titleFieldDefaults.setHoverCustomizer(new HoverCustomizer() { @Override public String hoverHTML(Object value, ListGridRecord record, int rowNum, int colNum) { if (value.toString().equalsIgnoreCase(START)) return "Start Scheduler Server from Stopped status"; else if (value.toString().equalsIgnoreCase(STOP)) return "Stop Scheduler Server (Submitted Jobs terminate)"; else if (value.toString().equalsIgnoreCase(FREEZE)) return "Freeze Scheduler Server (Running Tasks terminate)"; else if (value.toString().equalsIgnoreCase(RESUME)) return "Resume Scheduler Server from Paused or Frozen status"; else if (value.toString().equalsIgnoreCase(PAUSE)) return "Pause Scheduler Server (Running Jobs 
terminate)"; else if (value.toString().equalsIgnoreCase(KILL)) return "Kill Scheduler Server"; else if (value.toString().equalsIgnoreCase(SHUTDOWN)) return "Shutdown Scheduler Server (Freeze and Kill)"; return null; } }); adminMenuButton.setMenu(adminMenu); String login = LoginModel.getInstance().getLogin(); if (login != null) login = " <b>" + login + "</b>"; else login = ""; errorButton = new ToolStripButton("<strong>Network error</strong>", Images.instance.net_error_16().getSafeUri().asString()); errorButton.setBackgroundColor("#ffbbbb"); errorButton.addClickHandler(event -> { SchedulerPage.this.logWindow.show(); errorButton.hide(); }); errorButton.hide(); schedulerStatusLabel = new Label(SchedulerStatus.STARTED.name()); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_start_16().getSafeUri().asString()); schedulerStatusLabel.setIconSize(20); schedulerStatusLabel.setSize("105%", "105%"); HLayout schedulerStatusLabelLayout = new HLayout(); schedulerStatusLabelLayout.addMember(schedulerStatusLabel); ToolStripButton resourceManagerLinkButton = toolButtonsRender.getResourceManagerLinkButton(); ToolStripButton studioLinkButton = toolButtonsRender.getStudioLinkButton(); ToolStripButton schedulerLinkButton = toolButtonsRender.getSchedulerHighlightedLinkButton(); ToolStripButton automationDashboardLinkButton = toolButtonsRender.getAutomationDashboardLinkButton(); ToolStripButton logoutButton = toolButtonsRender.getLogoutButton(login, SchedulerPage.this.controller); tools.addMenuButton(portalMenuButton); tools.addMenuButton(adminMenuButton); tools.addMenuButton(helpMenuButton); tools.addSeparator(); tools.addButton(submitButton); tools.addSeparator(); tools.addButton(planButton); tools.addSeparator(); tools.addButton(errorButton); tools.addFill(); tools.addMember(schedulerStatusLabelLayout); tools.addFill(); tools.addButton(automationDashboardLinkButton); tools.addSpacer(12); tools.addButton(studioLinkButton); tools.addSpacer(12); 
tools.addButton(schedulerLinkButton); tools.addSpacer(12); tools.addButton(resourceManagerLinkButton); tools.addSpacer(2); tools.addSeparator(); tools.addSpacer(2); tools.addButton(logoutButton); tools.addSpacer(10); // disable all controls at first, next event will sort it out this.statusChanged(SchedulerStatus.KILLED); return tools; } /* * (non-Javadoc) * * @see * org.ow2.proactive_grid_cloud_portal.client.Listeners.SchedulerStatusListener#statusChanged( * org.ow2.proactive_grid_cloud_portal.shared.SchedulerStatus) */ public void statusChanged(SchedulerStatus status) { // this only changes the enable status of scheduler admin buttons switch (status) { case SHUTTING_DOWN: schedStartButton.setEnabled(false); schedStopButton.setEnabled(false); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(false); schedShutdownButton.setEnabled(false); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_shutdown_16().getSafeUri().asString()); case KILLED: schedStartButton.setEnabled(false); schedStopButton.setEnabled(false); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(false); schedShutdownButton.setEnabled(false); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_kill_16().getSafeUri().asString()); break; case FROZEN: schedStartButton.setEnabled(false); schedStopButton.setEnabled(true); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(true); schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_freeze_16().getSafeUri().asString()); break; case PAUSED: schedStartButton.setEnabled(false); schedStopButton.setEnabled(true); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(true); 
schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_pause_16().getSafeUri().asString()); break; case STARTED: case UNLINKED: schedStartButton.setEnabled(false); schedStopButton.setEnabled(true); schedFreezeButton.setEnabled(true); schedPauseButton.setEnabled(true); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_start_16().getSafeUri().asString()); break; case STOPPED: schedStartButton.setEnabled(true); schedStopButton.setEnabled(false); schedFreezeButton.setEnabled(false); schedPauseButton.setEnabled(false); schedResumeButton.setEnabled(false); schedKillButton.setEnabled(true); schedShutdownButton.setEnabled(true); schedulerStatusLabel.setIcon(SchedulerImages.instance.scheduler_stop_16().getSafeUri().asString()); break; default: LOGGER.warning("Unexpected scheduler status"); break; } // Update the scheduler status label schedulerStatusLabel.setContents("Status:" + status.name()); this.adminMenu.redraw(); } /** * Builds and returns the bottom pane: currently selected job somewhat in a master/detail fashion * * <pre> * +- layout:HLayout ----+ * |+- leftTabs:TabSet -+| * ||+- output:Tab ----+|| * ||| sel. job output ||| * ||+-----------------+|| * ||+- tasks:Tab -----+|| * ||| sel. job tasks ||| * ||+-----------------+|| * ||+- users:Tab -----+|| | left * ||| connected users ||| | * ||+-----------------+|| | the actual widget is horizontal * |+-------------------+| | * |+- rightTabs:TabSet +| V right * ||+- infoTab:Tab ---+|| * ||| sel. 
job info ||| * ||+-----------------+|| * ||+- filters:Tab ---+|| * ||| jobgrid filters ||| * ||+-----------------+|| * |+-------------------+| * +---------------------+ * </pre> * */ private Layout buildBotPane() { leftTabSet = new TabSet(); leftTabSet.setWidth("50%"); leftTabSet.setHeight100(); leftTabSet.setTabBarPosition(Side.TOP); leftTabSet.setShowResizeBar(true); tasksPane = this.controller.buildTaskView(); this.buildTasksTab(); this.visuView = new VisualizationViewSwitcher(this.controller); this.visuPane = this.visuView.build(); this.buildVisuTab(); final Tab usersTab = new Tab("Users Sessions", Images.instance.user_16().getSafeUri().asString()); this.usersView = new UsersView(this.controller); usersTab.setPane(this.usersView.build()); final Tab statsTab = new Tab("Statistics", Images.instance.stats_16().getSafeUri().asString()); this.statsView = new StatisticsView(this.controller); statsTab.setPane(this.statsView.build()); final Tab usageTab = new Tab("Usage", SchedulerImages.instance.usage_16().getSafeUri().asString()); usageTab.setPane(new UsageView(this.controller).build()); leftTabSet.addTab(tasksTab); leftTabSet.addTab(visuTab); leftTabSet.addTab(usersTab); leftTabSet.addTab(statsTab); leftTabSet.addTab(usageTab); leftTabSet.addTabSelectedHandler(event -> { if (leftTabSet.getSelectedTab().equals(tasksTab)) { controller.setLazyStatsFetch(true); controller.setLazyUserFetch(true); } else if (leftTabSet.getSelectedTab().equals(usersTab)) { controller.setLazyStatsFetch(true); controller.setLazyUserFetch(false); } else if (leftTabSet.getSelectedTab().equals(statsTab)) { controller.setLazyStatsFetch(false); controller.setLazyUserFetch(true); } if (leftTabSet.getSelectedTab().equals(visuTab)) { controller.setVisuFetchEnabled(true); JobsModel jobsModel = ((SchedulerModelImpl) controller.getModel()).getExecutionsModel().getJobsModel(); if (jobsModel.getSelectedJob() != null) { controller.visuFetch(jobsModel.getSelectedJob().getId().toString()); } } else { 
controller.setVisuFetchEnabled(false); } }); TabSet rightTabSet = new TabSet(); rightTabSet.setWidth("50%"); rightTabSet.setHeight100(); rightTabSet.setTabBarPosition(Side.TOP); Tab jobinfoTab = new Tab("Job Info", SchedulerImages.instance.info_16().getSafeUri().asString()); this.jobInfo = new JobInfoView(this.controller, new JobsDetailColumnsFactory()); jobinfoTab.setPane(this.jobInfo.build()); Tab taskinfoTab = new Tab("Task Info", SchedulerImages.instance.info_16().getSafeUri().asString()); this.taskInfo = new TaskInfoView(this.controller, new TaskDetailColumnsFactory()); taskinfoTab.setPane(this.taskInfo.build()); Tab outputTab = new Tab("Output", SchedulerImages.instance.output_16().getSafeUri().asString()); outputTab.setPane(this.controller.buildOutputView()); Tab serverLogsTab = new Tab("Server Logs", SchedulerImages.instance.output_16().getSafeUri().asString()); serverLogsTab.setPane(this.controller.buildServerLogsView()); Tab resultTab = new Tab("Preview", Images.instance.search_16().getSafeUri().asString()); resultTab.setPane(this.controller.buildPreviewView()); rightTabSet.addTab(jobinfoTab); rightTabSet.addTab(taskinfoTab); rightTabSet.addTab(outputTab); rightTabSet.addTab(serverLogsTab); rightTabSet.addTab(resultTab); HLayout layout = new HLayout(); layout.addMember(leftTabSet); layout.addMember(rightTabSet); return layout; } /** * Removes the layout and widgets from the page * Call this when the view should be definitely removed and GC's, else just hide() it */ public void destroy() { this.rootLayout.destroy(); this.logWindow.destroy(); this.aboutWindow.destroy(); this.settingsWindow.destroy(); this.rootLayout = null; this.jobInfo = null; this.controller = null; this.logWindow = null; this.aboutWindow = null; } @Override public void logMessage(String message) { long dt = System.currentTimeMillis() - this.lastCriticalMessage; if (dt > SchedulerConfig.get().getClientRefreshTime() * 4) { this.errorButton.hide(); } } @Override public void 
logImportantMessage(String message) { long dt = System.currentTimeMillis() - this.lastCriticalMessage; if (dt > SchedulerConfig.get().getClientRefreshTime() * 4) { this.errorButton.hide(); } } @Override public void logCriticalMessage(String message) { this.lastCriticalMessage = System.currentTimeMillis(); this.errorButton.show(); } @Override public void modeSwitched(ExecutionListMode mode) { switch (mode) { case JOB_CENTRIC: this.buildTasksTab(); this.buildVisuTab(); leftTabSet.addTab(this.tasksTab, 0); leftTabSet.addTab(this.visuTab, 1); break; case TASK_CENTRIC: leftTabSet.updateTab(tasksTab, null); leftTabSet.removeTab(tasksTab); leftTabSet.updateTab(visuTab, null); leftTabSet.removeTab(visuTab); break; default: LOGGER.warning("Unexpected mode"); break; } leftTabSet.markForRedraw(); } protected void buildTasksTab() { tasksTab = new Tab("Tasks", SchedulerImages.instance.monitoring_16().getSafeUri().asString()); tasksTab.setPane(tasksPane); } protected void buildVisuTab() { visuTab = new Tab("Visualization", ImagesUnbundled.PA_16); visuTab.setPane(this.visuPane); } }
Modify scheduler shutdown button label
scheduler-portal/src/main/java/org/ow2/proactive_grid_cloud_portal/scheduler/client/SchedulerPage.java
Modify scheduler shutdown button label
Java
lgpl-2.1
d42512c4b53beffa72d4843249d55e3cb6e4b005
0
jagazee/teiid-8.7,jagazee/teiid-8.7,kenweezy/teiid,kenweezy/teiid,kenweezy/teiid
/* * JBoss, Home of Professional Open Source. * Copyright (C) 2008 Red Hat, Inc. * Copyright (C) 2000-2007 MetaMatrix, Inc. * Licensed to Red Hat, Inc. under one or more contributor * license agreements. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. */ package com.metamatrix.common.log; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import junit.extensions.TestSetup; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import com.metamatrix.common.config.CurrentConfiguration; import com.metamatrix.common.messaging.MessageBusConstants; import com.metamatrix.core.CoreConstants; import com.metamatrix.core.log.LogListener; import com.metamatrix.core.log.LogMessage; import com.metamatrix.core.log.MessageLevel; import com.metamatrix.internal.core.log.PlatformLog; /** * This test case tests the LogManager. */ public class TestLogManager extends TestCase { /** * Constructor for TestLogManager. 
* @param name */ public TestLogManager(String name) { super(name); } public static Test suite() { TestSuite suite = new TestSuite(); suite.addTestSuite(TestLogManager.class); //return suite; return new TestSetup(suite){ protected void setUp() throws Exception{ setUpOnce(); } protected void tearDown() throws Exception{ } }; } private static void setUpOnce() throws Exception { System.setProperty(CoreConstants.NO_CONFIGURATION, "");//$NON-NLS-1$ System.setProperty(MessageBusConstants.MESSAGE_BUS_TYPE, MessageBusConstants.TYPE_NOOP); CurrentConfiguration.reset(); LogManager.stop(); } // ========================================================================= // T E S T C A S E S // ========================================================================= /* * Test for boolean isMessageToBeRecorded(String, int) */ public void testIsMessageToBeRecordedStringI() { assertFalse(LogManager.isMessageToBeRecorded("SomeContext", MessageLevel.CRITICAL) ); //$NON-NLS-1$ } /** * Test that all msgs logged are equal and output in same order. 
*/ public void testLogMessage() throws Exception { // init the LogManager (do not rely on later calls to do it) LogConfiguration cfg = LogManager.getLogConfiguration(); cfg.setMessageLevel( MessageLevel.INFO ); ListLogger listener = new ListLogger(6); PlatformLog logger = PlatformLog.getInstance(); logger.addListener(listener); List sentMsgList = new ArrayList(); sentMsgList.add("A message 1"); //$NON-NLS-1$ sentMsgList.add("A message 2"); //$NON-NLS-1$ sentMsgList.add("A message 3"); //$NON-NLS-1$ sentMsgList.add("A message 4"); //$NON-NLS-1$ sentMsgList.add("A message 5"); //$NON-NLS-1$ sentMsgList.add("A message 6"); //$NON-NLS-1$ for (Iterator iter = sentMsgList.iterator(); iter.hasNext();) { String msg = (String) iter.next(); LogManager.logInfo("SomeContext", msg); //$NON-NLS-1$ } List recevedMsgList = listener.getLoggedMessages(); assertEquals(sentMsgList, recevedMsgList); } /** * * A log listener that saves messages (IStatus)s in a * List for later comparison. */ class ListLogger implements LogListener { private List messages = new ArrayList(); private int expectedMessages; public ListLogger(int expectedMessages) { this.expectedMessages = expectedMessages; } /* (non-Javadoc) * @see com.metamatrix.core.log.LogListener#logMessage(org.eclipse.core.runtime.IStatus, long, java.lang.String, java.lang.String) */ public synchronized void logMessage(LogMessage msg){ this.messages.add(msg.getText()); if (this.messages.size() == expectedMessages) { this.notifyAll(); } } /* (non-Javadoc) * @see com.metamatrix.core.log.LogListener#shutdown() */ public void shutdown() { messages.clear(); messages = null; } public int size() { return this.messages.size(); } public synchronized List getLoggedMessages() throws InterruptedException { if (this.messages.size() < expectedMessages) { this.wait(1000); } return this.messages; } } }
federate-common-internal/src/test/java/com/metamatrix/common/log/TestLogManager.java
/* * JBoss, Home of Professional Open Source. * Copyright (C) 2008 Red Hat, Inc. * Copyright (C) 2000-2007 MetaMatrix, Inc. * Licensed to Red Hat, Inc. under one or more contributor * license agreements. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. */ package com.metamatrix.common.log; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import junit.extensions.TestSetup; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import com.metamatrix.common.config.CurrentConfiguration; import com.metamatrix.common.messaging.MessageBusConstants; import com.metamatrix.core.CoreConstants; import com.metamatrix.core.log.LogListener; import com.metamatrix.core.log.LogMessage; import com.metamatrix.core.log.MessageLevel; import com.metamatrix.internal.core.log.PlatformLog; /** * This test case tests the LogManager. */ public class TestLogManager extends TestCase { /** * Constructor for TestLogManager. 
* @param name */ public TestLogManager(String name) { super(name); } public static Test suite() { TestSuite suite = new TestSuite(); suite.addTestSuite(TestLogManager.class); //return suite; return new TestSetup(suite){ protected void setUp() throws Exception{ setUpOnce(); } protected void tearDown() throws Exception{ } }; } private static void setUpOnce() throws Exception { CurrentConfiguration.reset(); LogManager.stop(); System.setProperty(CoreConstants.NO_CONFIGURATION, "");//$NON-NLS-1$ System.setProperty(MessageBusConstants.MESSAGE_BUS_TYPE, MessageBusConstants.TYPE_NOOP); } // ========================================================================= // T E S T C A S E S // ========================================================================= /* * Test for boolean isMessageToBeRecorded(String, int) */ public void testIsMessageToBeRecordedStringI() { assertFalse(LogManager.isMessageToBeRecorded("SomeContext", MessageLevel.CRITICAL) ); //$NON-NLS-1$ } /** * Test that all msgs logged are equal and output in same order. 
*/ public void testLogMessage() throws Exception { // init the LogManager (do not rely on later calls to do it) LogConfiguration cfg = LogManager.getLogConfiguration(); cfg.setMessageLevel( MessageLevel.INFO ); ListLogger listener = new ListLogger(6); PlatformLog logger = PlatformLog.getInstance(); logger.addListener(listener); List sentMsgList = new ArrayList(); sentMsgList.add("A message 1"); //$NON-NLS-1$ sentMsgList.add("A message 2"); //$NON-NLS-1$ sentMsgList.add("A message 3"); //$NON-NLS-1$ sentMsgList.add("A message 4"); //$NON-NLS-1$ sentMsgList.add("A message 5"); //$NON-NLS-1$ sentMsgList.add("A message 6"); //$NON-NLS-1$ for (Iterator iter = sentMsgList.iterator(); iter.hasNext();) { String msg = (String) iter.next(); LogManager.logInfo("SomeContext", msg); //$NON-NLS-1$ } List recevedMsgList = listener.getLoggedMessages(); assertEquals(sentMsgList, recevedMsgList); } /** * * A log listener that saves messages (IStatus)s in a * List for later comparison. */ class ListLogger implements LogListener { private List messages = new ArrayList(); private int expectedMessages; public ListLogger(int expectedMessages) { this.expectedMessages = expectedMessages; } /* (non-Javadoc) * @see com.metamatrix.core.log.LogListener#logMessage(org.eclipse.core.runtime.IStatus, long, java.lang.String, java.lang.String) */ public synchronized void logMessage(LogMessage msg){ this.messages.add(msg.getText()); if (this.messages.size() == expectedMessages) { this.notifyAll(); } } /* (non-Javadoc) * @see com.metamatrix.core.log.LogListener#shutdown() */ public void shutdown() { messages.clear(); messages = null; } public int size() { return this.messages.size(); } public synchronized List getLoggedMessages() throws InterruptedException { if (this.messages.size() < expectedMessages) { this.wait(1000); } return this.messages; } } }
fixed error that was being thrown (but wasn't causing a failure)
federate-common-internal/src/test/java/com/metamatrix/common/log/TestLogManager.java
fixed error that was being thrown (but wasn't causing a failure)
Java
lgpl-2.1
a54b86124508ee7a8fc3a0f4847e8092748cbc10
0
justincc/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,JoeCarlson/intermine,kimrutherford/intermine,kimrutherford/intermine,justincc/intermine,JoeCarlson/intermine,zebrafishmine/intermine,elsiklab/intermine,kimrutherford/intermine,joshkh/intermine,tomck/intermine,zebrafishmine/intermine,justincc/intermine,elsiklab/intermine,tomck/intermine,tomck/intermine,elsiklab/intermine,zebrafishmine/intermine,zebrafishmine/intermine,elsiklab/intermine,tomck/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,joshkh/intermine,zebrafishmine/intermine,JoeCarlson/intermine,kimrutherford/intermine,kimrutherford/intermine,elsiklab/intermine,JoeCarlson/intermine,elsiklab/intermine,kimrutherford/intermine,JoeCarlson/intermine,justincc/intermine,justincc/intermine,justincc/intermine,tomck/intermine,justincc/intermine,joshkh/intermine,tomck/intermine,joshkh/intermine,JoeCarlson/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,JoeCarlson/intermine,elsiklab/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,zebrafishmine/intermine,elsiklab/intermine,justincc/intermine,tomck/intermine,joshkh/intermine,kimrutherford/intermine,zebrafishmine/intermine,kimrutherford/intermine,kimrutherford/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine
package org.intermine.bio.dataconversion; /* * Copyright (C) 2002-2011 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.intermine.dataconversion.ItemWriter; import org.intermine.metadata.Model; import org.intermine.objectstore.ObjectStoreException; import org.intermine.sql.Database; import org.intermine.util.StringUtil; import org.intermine.xml.full.Item; import org.intermine.xml.full.ReferenceList; /** * Read Ensembl SNP data directly from MySQL variation database. 
* @author Richard Smith */ public class EnsemblSnpDbConverter extends BioDBConverter { private static final String DATASET_TITLE = "Ensembl SNP data"; private static final String DATA_SOURCE_NAME = "Ensembl"; private final Map<String, Set<String>> pendingSnpConsequences = new HashMap<String, Set<String>>(); private final Map<String, Integer> storedSnpIds = new HashMap<String, Integer>(); private final Map<String, String> storedSnpItemIdentifiers = new HashMap<String, String>(); private Set<String> snpSourceIds = null; // store a mapping from variation_id in ensembl database to stored SNP id in objectstore //private IntToIntMap variationIdToObjectId = new IntToIntMap(); private Map<Integer, String> variationIdToItemIdentifier = new HashMap<Integer, String>(); // default to human or take value set by parser Integer taxonId = null; private static final int PLANT = 3702; // There may be SNPs from multiple sources in the database, optionally restrict them Set<String> snpSources = new HashSet<String>(); // Edit to restrict to loading fewer chromosomes private static final int MIN_CHROMOSOME = 1; private Map<String, String> sources = new HashMap<String, String>(); private Map<String, String> states = new HashMap<String, String>(); private Map<String, String> transcripts = new HashMap<String, String>(); private Map<String, String> consequenceTypes = new HashMap<String, String>(); private static final Logger LOG = Logger.getLogger(EnsemblSnpDbConverter.class); /** * Construct a new EnsemblSnpDbConverter. 
* @param database the database to read from * @param model the Model used by the object store we will write to with the ItemWriter * @param writer an ItemWriter used to handle Items created */ public EnsemblSnpDbConverter(Database database, Model model, ItemWriter writer) { super(database, model, writer, DATA_SOURCE_NAME, DATASET_TITLE); } /** * Set the organism to load * @param taxonId the organism to load */ public void setOrganism(String taxonId) { this.taxonId = Integer.parseInt(taxonId); } /** * Optionally restrict the sources of SNPs to load by entries in source table, e.g. to dbSNP. * @param sourceStr a space-separated list of sources */ public void setSources(String sourceStr) { for (String source : sourceStr.split(" ")) { snpSources.add(source.trim()); } } /** * {@inheritDoc} */ public void process() throws Exception { // a database has been initialised from properties starting with db.ensembl-snp-db if (this.taxonId == null) { throw new IllegalArgumentException("Must supply a taxon id for this variation database" + " set the 'organism' property in project.xml"); } Connection connection = getDatabase().getConnection(); List<String> chrNames = new ArrayList<String>(); for (int i = MIN_CHROMOSOME; i <= 22; i++) { chrNames.add("" + i); } chrNames.add("X"); chrNames.add("Y"); chrNames.add("MT"); chrNames.add("Mt"); chrNames.add("Pt"); for (String chrName : chrNames) { process(connection, chrName); createSynonyms(connection, chrName); } storeFinalSnps(); if (PLANT == this.taxonId.intValue()) { processGenotypes(connection); } connection.close(); } private void storeFinalSnps() throws Exception { LOG.info("storeFinalSnps() pendingConsequences.size(): " + pendingSnpConsequences.size()); LOG.info("storeFinalSnps() storedSnpIds.size(): " + storedSnpIds.size()); for (String rsNumber : pendingSnpConsequences.keySet()) { Integer storedSnpId = storedSnpIds.get(rsNumber); Set<String> consequenceIdentifiers = pendingSnpConsequences.get(rsNumber); ReferenceList col = new 
ReferenceList("consequences", new ArrayList<String>(consequenceIdentifiers)); store(col, storedSnpId); } } /** * {@inheritDoc} */ public void process(Connection connection, String chrName) throws Exception { LOG.info("Starting to process chromosome " + chrName); ResultSet res = queryVariation(connection, chrName); int counter = 0; int snpCounter = 0; Item currentSnp = null; Set<String> seenLocsForSnp = new HashSet<String>(); String previousRsNumber = null; Boolean previousUniqueLocation = true; Set<String> consequenceIdentifiers = new HashSet<String>(); boolean storeSnp = false; String currentSnpIdentifier = null; Integer currentVariationId = null; // This code is complicated because not all SNPs map to a unique location and often have // locations on multiple chromosomes - we're processing one chromosome at a time for faster // queries to mySQL. while (res.next()) { counter++; String rsNumber = res.getString("variation_name"); boolean newSnp = rsNumber.equals(previousRsNumber) ? false : true; if (newSnp) { // starting a new SNP, store the one just finished - previousRsNumber Integer storedSnpId = storedSnpIds.get(previousRsNumber); // if we didn't get back a storedSnpId this was the first time we found this SNP, // so store it now if (storeSnp && storedSnpId == null) { storedSnpId = store(currentSnp); variationIdToItemIdentifier.put(currentVariationId, currentSnp.getIdentifier()); snpCounter++; } if (previousUniqueLocation) { // the SNP we just stored has only one location so we won't see it again storeSnpCollections(storedSnpId, consequenceIdentifiers); } else { // we'll see this SNP multiple times so hang onto data Set<String> snpConsequences = pendingSnpConsequences.get(previousRsNumber); if (snpConsequences == null) { snpConsequences = new HashSet<String>(); pendingSnpConsequences.put(previousRsNumber, snpConsequences); } snpConsequences.addAll(consequenceIdentifiers); if (!storedSnpIds.containsKey(previousRsNumber)) { storedSnpIds.put(previousRsNumber, 
storedSnpId); storedSnpItemIdentifiers.put(previousRsNumber, currentSnp.getIdentifier()); } } // START NEW SNP previousRsNumber = rsNumber; seenLocsForSnp = new HashSet<String>(); consequenceIdentifiers = new HashSet<String>(); storeSnp = true; // map weight is the number of chromosome locations for the SNP, in practice there // are sometimes fewer locations than the map_weight indicates int mapWeight = res.getInt("map_weight"); boolean uniqueLocation = (mapWeight == 1) ? true : false; previousUniqueLocation = uniqueLocation; // if not a unique location and we've seen the SNP before, don't store if (!uniqueLocation && pendingSnpConsequences.containsKey(rsNumber)) { storeSnp = false; currentSnpIdentifier = storedSnpItemIdentifiers.get(rsNumber); } if (storeSnp) { currentSnp = createItem("SNP"); currentSnp.setAttribute("primaryIdentifier", rsNumber); currentSnp.setReference("organism", getOrganismItem(taxonId)); currentSnp.setAttribute("uniqueLocation", "" + uniqueLocation); currentSnpIdentifier = currentSnp.getIdentifier(); currentVariationId = res.getInt("variation_id"); String alleles = res.getString("allele_string"); if (!StringUtils.isBlank(alleles)) { currentSnp.setAttribute("alleles", alleles); } String type = determineType(alleles); if (type != null) { currentSnp.setAttribute("type", type); } // CHROMOSOME AND LOCATION // if SNP is mapped to multiple locations don't set chromosome and // chromosomeLocation references int start = res.getInt("seq_region_start"); int end = res.getInt("seq_region_end"); int chrStrand = res.getInt("seq_region_strand"); int chrStart = Math.min(start, end); int chrEnd = Math.max(start, end); Item loc = createItem("Location"); loc.setAttribute("start", "" + chrStart); loc.setAttribute("end", "" + chrEnd); loc.setAttribute("strand", "" + chrStrand); loc.setReference("locatedOn", getChromosome(chrName, taxonId)); loc.setReference("feature", currentSnpIdentifier); store(loc); // if mapWeight is 1 there is only one chromosome location, 
so set shortcuts if (uniqueLocation) { currentSnp.setReference("chromosome", getChromosome(chrName, taxonId)); currentSnp.setReference("chromosomeLocation", loc); } seenLocsForSnp.add(chrName + ":" + chrStart); // SOURCE String source = res.getString("s.name"); currentSnp.setReference("source", getSourceIdentifier(source)); // VALIDATION STATES String validationStatus = res.getString("validation_status"); List<String> validationStates = getValidationStateCollection(validationStatus); if (!validationStates.isEmpty()) { currentSnp.setCollection("validations", validationStates); } } } int mapWeight = res.getInt("map_weight"); boolean uniqueLocation = (mapWeight == 1) ? true : false; // we're on the same SNP but maybe a new location int start = res.getInt("seq_region_start"); int end = res.getInt("seq_region_end"); int strand = res.getInt("seq_region_strand"); int chrStart = Math.min(start, end); int chrEnd = Math.max(start, end); if (currentSnp == null) { LOG.error("currentSNP is null. vf.variation_feature_id: " + res.getString("variation_feature_id") + " rsNumber: " + rsNumber + " previousRsNumber: " + previousRsNumber + " storeSnp: " + storeSnp); } String chrLocStr = chrName + ":" + chrStart; if (!seenLocsForSnp.contains(chrLocStr)) { seenLocsForSnp.add(chrLocStr); // if this location is on a chromosome we want, store it Item loc = createItem("Location"); loc.setAttribute("start", "" + chrStart); loc.setAttribute("end", "" + chrEnd); loc.setAttribute("strand", "" + strand); loc.setReference("feature", currentSnpIdentifier); loc.setReference("locatedOn", getChromosome(chrName, taxonId)); store(loc); } // CONSEQUENCE TYPES // for SNPs without a uniqueLocation there will be different consequences at each one. 
// some consequences will need to stored at the end String type = res.getString("tv.consequence_types"); // Seen one example so far where consequence type is an empty string if (StringUtils.isBlank(type)) { type = "UNKOWN"; } String transcriptStableId = res.getString("feature_stable_id"); Item consequenceItem = createItem("Consequence"); consequenceItem.setAttribute("description", type); for (String individualType : type.split(",")) { consequenceItem.addToCollection("types", getConsequenceType(individualType.trim())); } setAttIfValue(consequenceItem, "peptideAlleles", res.getString("pep_allele_string")); setAttIfValue(consequenceItem, "siftPrediction", res.getString("sift_prediction")); setAttIfValue(consequenceItem, "siftScore", res.getString("sift_score")); setAttIfValue(consequenceItem, "polyphenPrediction", res.getString("polyphen_prediction")); setAttIfValue(consequenceItem, "polyphenScore", res.getString("polyphen_score")); if (!StringUtils.isBlank(transcriptStableId)) { consequenceItem.setReference("transcript", getTranscriptIdentifier(transcriptStableId)); } consequenceIdentifiers.add(consequenceItem.getIdentifier()); store(consequenceItem); if (counter % 100000 == 0) { LOG.info("Read " + counter + " rows total, stored " + snpCounter + " SNPs. 
for chr " + chrName); } } if (currentSnp != null && storeSnp) { Integer storedSnpId = store(currentSnp); variationIdToItemIdentifier.put(currentVariationId, currentSnp.getIdentifier()); if (!storedSnpIds.containsKey(storedSnpId)) { storeSnpCollections(storedSnpId, consequenceIdentifiers); } } LOG.info("Finished " + counter + " rows total, stored " + snpCounter + " SNPs for chr " + chrName); LOG.info("variationIdToItemIdentifier.size() = " + variationIdToItemIdentifier.size()); } private void setAttIfValue(Item item, String attName, String attValue) { if (!StringUtils.isBlank(attValue)) { item.setAttribute(attName, attValue); } } private String getConsequenceType(String type) throws ObjectStoreException { if (!consequenceTypes.containsKey(type)) { Item consequenceType = createItem("ConsequenceType"); consequenceType.setAttribute("type", type); store(consequenceType); consequenceTypes.put(type, consequenceType.getIdentifier()); } return consequenceTypes.get(type); } // This has to be called after process() called for the chromosome because it needs // variationIdToItemIdentifier to be populated. 
private void createSynonyms(Connection connection, String chrName) throws SQLException, ObjectStoreException { ResultSet res = querySynonyms(connection, chrName); int synonymCounter = 0; while (res.next()) { Integer variationId = res.getInt("variation_id"); String synonym = res.getString("name"); if (!StringUtils.isBlank(synonym)) { synonymCounter++; createSynonym(variationIdToItemIdentifier.get(variationId), synonym, true); } } LOG.info("Created " + synonymCounter + " synonyms for chr " + chrName); } private void processGenotypes(Connection connection) throws Exception { // query for strains ResultSet res = queryStrains(connection); int strainCounter = 0; while (res.next()) { Integer strainId = res.getInt("sample_id"); String strainName = res.getString("name"); Item strain = createItem("Strain"); strain.setAttribute("name", strainName); store(strain); // for each strain query and store genotypes processGenotypesForStrain(connection, strainId, strain.getIdentifier()); strainCounter++; if (strainCounter >= 100) { break; } } } private void processGenotypesForStrain(Connection connection, Integer strainId, String strainIdentifier) throws Exception { ResultSet res = queryGenotypesForStrain(connection, strainId); int snpReferenceCount = 0; int ignoredCount = 0; while (res.next()) { Integer variationId = res.getInt("variation_id"); String allele1 = res.getString("allele_1"); String allele2 = res.getString("allele_2"); String snpItemIdentifier = variationIdToItemIdentifier.get(variationId); Item genotype = createItem("Genotype"); genotype.setAttribute("allele1", allele1); genotype.setAttribute("allele2", allele2); if (snpItemIdentifier != null) { genotype.setReference("snp", snpItemIdentifier); snpReferenceCount++; } else { ignoredCount++; } genotype.setReference("strain", strainIdentifier); store(genotype); } String message = "For strain " + strainId + " snp ref: " + snpReferenceCount + ", no ref: " + ignoredCount; LOG.info(message); System.out.println(message); } 
private ResultSet queryGenotypesForStrain(Connection connection, Integer strainId) throws SQLException{ String query = "SELECT variation_id, allele_1, allele_2" + " FROM tmp_individual_genotype_single_bp" + " WHERE sample_id = " + strainId; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private void storeSnpCollections(Integer storedSnpId, Set<String> consequenceIdentifiers) throws ObjectStoreException { if (!consequenceIdentifiers.isEmpty()) { ReferenceList col = new ReferenceList("consequences", new ArrayList<String>(consequenceIdentifiers)); store(col, storedSnpId); } } /** * Given an allele string read from the database determine the type of variation, e.g. snp, * in-del, etc. This is a re-implementation of code from the Ensembl perl API, see: * http://www.ensembl.org/info/docs/Pdoc/ensembl-variation/ * modules/Bio/EnsEMBL/Variation/Utils/Sequence.html#CODE4 * @param alleleStr the alleles to determine the type for * @return a variation class or null if none can be determined */ protected String determineType(String alleleStr) { String type = null; final String VALID_BASES = "ATUGCYRSWKMBDHVN"; alleleStr = alleleStr.toUpperCase(); if (!StringUtils.isBlank(alleleStr)) { // snp if e.g. 
A/C or A|C if (alleleStr.matches("^[" + VALID_BASES + "]([\\/\\|\\\\][" + VALID_BASES + "])+$")) { type = "snp"; } else if ("CNV".equals(alleleStr)) { type = alleleStr.toLowerCase(); } else if ("CNV_PROBE".equals(alleleStr)) { type = "cnv probe"; } else if ("HGMD_MUTATION".equals(alleleStr)) { type = alleleStr.toLowerCase(); } else { String[] alleles = alleleStr.split("[\\|\\/\\\\]"); if (alleles.length == 1) { type = "het"; } else if (alleles.length == 2) { if ((StringUtils.containsOnly(alleles[0], VALID_BASES) && "-".equals(alleles[1])) || (StringUtils.containsOnly(alleles[1], VALID_BASES) && "-".equals(alleles[0]))) { type = "in-del"; } else if (containsOneOf(alleles[0], "LARGE", "INS", "DEL") || containsOneOf(alleles[1], "LARGE", "INS", "DEL")) { type = "named"; } else if ((StringUtils.containsOnly(alleles[0], VALID_BASES) && alleles[0].length() > 1) || (StringUtils.containsOnly(alleles[1], VALID_BASES) && alleles[1].length() > 1)) { // AA/GC 2 alleles type = "substitution"; } } else if (alleles.length > 2) { if (containsDigit(alleles[0])) { type = "microsat"; } else if (anyContainChar(alleles, "-")) { type = "mixed"; } } if (type == null) { LOG.warn("Failed to work out allele type for: " + alleleStr); } } } return type; } private String getSourceIdentifier(String name) throws ObjectStoreException { String sourceIdentifier = sources.get(name); if (sourceIdentifier == null) { Item source = createItem("Source"); source.setAttribute("name", name); store(source); sourceIdentifier = source.getIdentifier(); sources.put(name, sourceIdentifier); } return sourceIdentifier; } private String getTranscriptIdentifier(String transcriptStableId) throws ObjectStoreException { String transcriptIdentifier = transcripts.get(transcriptStableId); if (transcriptIdentifier == null) { Item transcript = createItem("Transcript"); transcript.setAttribute("primaryIdentifier", transcriptStableId); store(transcript); transcriptIdentifier = transcript.getIdentifier(); 
transcripts.put(transcriptStableId, transcriptIdentifier); } return transcriptIdentifier; } private List<String> getValidationStateCollection(String input) throws ObjectStoreException { List<String> stateIdentifiers = new ArrayList<String>(); if (!StringUtils.isBlank(input)) { for (String state : input.split(",")) { stateIdentifiers.add(getStateIdentifier(state)); } } return stateIdentifiers; } private String getStateIdentifier(String name) throws ObjectStoreException { String stateIdentifier = states.get(name); if (stateIdentifier == null) { Item state = createItem("ValidationState"); state.setAttribute("name", name); store(state); stateIdentifier = state.getIdentifier(); states.put(name, stateIdentifier); } return stateIdentifier; } private ResultSet queryVariation(Connection connection, String chrName) throws SQLException { String query = "SELECT vf.variation_feature_id, vf.variation_name, vf.variation_id," + " vf.allele_string, sr.name," + " vf.map_weight, vf.seq_region_start, vf.seq_region_end, vf.seq_region_strand, " + " s.name," + " vf.validation_status," + " vf.consequence_type," + " tv.cdna_start,tv.consequence_types,tv.pep_allele_string,tv.feature_stable_id," + " tv.sift_prediction, tv.sift_score, tv.polyphen_prediction, tv.polyphen_score" + " FROM seq_region sr, source s, variation_feature vf " + " LEFT JOIN (transcript_variation tv)" + " ON (vf.variation_feature_id = tv.variation_feature_id" + " AND tv.cdna_start is not null)" + " WHERE vf.seq_region_id = sr.seq_region_id" + " AND vf.source_id = s.source_id" + " AND sr.name = '" + chrName + "'" + " ORDER BY vf.variation_id"; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private ResultSet querySynonyms(Connection connection, String chrName) throws SQLException { String query = "SELECT vs.variation_id, vs.name" + " FROM variation_synonym vs, variation_feature vf, seq_region sr" + " WHERE vs.variation_id = 
vf.variation_id" + " AND vf.seq_region_id = sr.seq_region_id" + " AND sr.name = '" + chrName + "'" + " AND vs.source_id IN (" + StringUtil.join(getSnpSourceIds(connection), ",") + ")" + " ORDER BY vs.variation_id"; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private Set<String> getSnpSourceIds(Connection connection) throws SQLException { if (snpSourceIds == null) { snpSourceIds = new HashSet<String>(); String sql = "SELECT source_id FROM source"; if (snpSources != null && !snpSources.isEmpty()) { sql += " WHERE name IN (" + makeInList(snpSources) + ")"; } Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(sql); while (res.next()) { snpSourceIds.add(res.getString("source_id")); } if (snpSourceIds.isEmpty()) { throw new RuntimeException("Failed to retrieve source_ids for dbSNP source"); } } return snpSourceIds; } private String makeInList(Collection<String> strings) { Set<String> quoted = new HashSet<String>(); for (String s : strings) { quoted.add("\"" + s + "\""); } return StringUtil.join(quoted, ","); } private ResultSet queryStrains(Connection connection) throws SQLException { String query = "SELECT sample_id, name from sample"; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * {@inheritDoc} */ @Override public String getDataSetTitle(int taxonId) { return DATASET_TITLE; } private boolean containsOneOf(String target, String... 
substrings) { for (String substring : substrings) { if (target.contains(substring)) { return true; } } return false; } private boolean anyContainChar(String[] targets, String substring) { for (String target : targets) { if (target.contains(substring)) { return true; } } return false; } private boolean containsDigit(String target) { for (int i = 0; i < target.length(); i++) { if (Character.isDigit(target.charAt(i))) { return true; } } return false; } }
bio/sources/ensembl-snp-db/main/src/org/intermine/bio/dataconversion/EnsemblSnpDbConverter.java
package org.intermine.bio.dataconversion; /* * Copyright (C) 2002-2011 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.intermine.dataconversion.ItemWriter; import org.intermine.metadata.Model; import org.intermine.objectstore.ObjectStoreException; import org.intermine.sql.Database; import org.intermine.util.StringUtil; import org.intermine.xml.full.Item; import org.intermine.xml.full.ReferenceList; /** * Read Ensembl SNP data directly from MySQL variation database. 
* @author Richard Smith */ public class EnsemblSnpDbConverter extends BioDBConverter { private static final String DATASET_TITLE = "Ensembl SNP data"; private static final String DATA_SOURCE_NAME = "Ensembl"; private final Map<String, Set<String>> pendingSnpConsequences = new HashMap<String, Set<String>>(); private final Map<String, Integer> storedSnpIds = new HashMap<String, Integer>(); private final Map<String, String> storedSnpItemIdentifiers = new HashMap<String, String>(); private Set<String> snpSourceIds = null; // store a mapping from variation_id in ensembl database to stored SNP id in objectstore //private IntToIntMap variationIdToObjectId = new IntToIntMap(); private Map<Integer, String> variationIdToItemIdentifier = new HashMap<Integer, String>(); // default to human or take value set by parser Integer taxonId = null; private static final int PLANT = 3702; // There may be SNPs from multiple sources in the database, optionally restrict them Set<String> snpSources = new HashSet<String>(); // Edit to restrict to loading fewer chromosomes private static final int MIN_CHROMOSOME = 1; private Map<String, String> sources = new HashMap<String, String>(); private Map<String, String> states = new HashMap<String, String>(); private Map<String, String> transcripts = new HashMap<String, String>(); private Map<String, String> consequenceTypes = new HashMap<String, String>(); private static final Logger LOG = Logger.getLogger(EnsemblSnpDbConverter.class); /** * Construct a new EnsemblSnpDbConverter. 
* @param database the database to read from * @param model the Model used by the object store we will write to with the ItemWriter * @param writer an ItemWriter used to handle Items created */ public EnsemblSnpDbConverter(Database database, Model model, ItemWriter writer) { super(database, model, writer, DATA_SOURCE_NAME, DATASET_TITLE); } /** * Set the organism to load * @param taxonId the organism to load */ public void setOrganism(String taxonId) { this.taxonId = Integer.parseInt(taxonId); } /** * Optionally restrict the sources of SNPs to load by entries in source table, e.g. to dbSNP. * @param sourceStr a space-separated list of sources */ public void setSources(String sourceStr) { for (String source : sourceStr.split(" ")) { snpSources.add(source.trim()); } } /** * {@inheritDoc} */ public void process() throws Exception { // a database has been initialised from properties starting with db.ensembl-snp-db if (this.taxonId == null) { throw new IllegalArgumentException("Must supply a taxon id for this variation database" + " set the 'organism' property in project.xml"); } Connection connection = getDatabase().getConnection(); List<String> chrNames = new ArrayList<String>(); // for (int i = MIN_CHROMOSOME; i <= 22; i++) { // chrNames.add("" + i); // } // chrNames.add("X"); // chrNames.add("Y"); // chrNames.add("MT"); // chrNames.add("Mt"); // chrNames.add("Pt"); chrNames.add("22"); for (String chrName : chrNames) { process(connection, chrName); createSynonyms(connection, chrName); } storeFinalSnps(); if (PLANT == this.taxonId.intValue()) { processGenotypes(connection); } connection.close(); } private void storeFinalSnps() throws Exception { LOG.info("storeFinalSnps() pendingConsequences.size(): " + pendingSnpConsequences.size()); LOG.info("storeFinalSnps() storedSnpIds.size(): " + storedSnpIds.size()); for (String rsNumber : pendingSnpConsequences.keySet()) { Integer storedSnpId = storedSnpIds.get(rsNumber); Set<String> consequenceIdentifiers = 
pendingSnpConsequences.get(rsNumber); ReferenceList col = new ReferenceList("consequences", new ArrayList<String>(consequenceIdentifiers)); store(col, storedSnpId); } } /** * {@inheritDoc} */ public void process(Connection connection, String chrName) throws Exception { LOG.info("Starting to process chromosome " + chrName); ResultSet res = queryVariation(connection, chrName); int counter = 0; int snpCounter = 0; Item currentSnp = null; Set<String> seenLocsForSnp = new HashSet<String>(); String previousRsNumber = null; Boolean previousUniqueLocation = true; Set<String> consequenceIdentifiers = new HashSet<String>(); boolean storeSnp = false; String currentSnpIdentifier = null; Integer currentVariationId = null; // This code is complicated because not all SNPs map to a unique location and often have // locations on multiple chromosomes - we're processing one chromosome at a time for faster // queries to mySQL. while (res.next()) { counter++; String rsNumber = res.getString("variation_name"); boolean newSnp = rsNumber.equals(previousRsNumber) ? 
false : true; if (newSnp) { // starting a new SNP, store the one just finished - previousRsNumber Integer storedSnpId = storedSnpIds.get(previousRsNumber); // if we didn't get back a storedSnpId this was the first time we found this SNP, // so store it now if (storeSnp && storedSnpId == null) { storedSnpId = store(currentSnp); variationIdToItemIdentifier.put(currentVariationId, currentSnp.getIdentifier()); snpCounter++; } if (previousUniqueLocation) { // the SNP we just stored has only one location so we won't see it again storeSnpCollections(storedSnpId, consequenceIdentifiers); } else { // we'll see this SNP multiple times so hang onto data Set<String> snpConsequences = pendingSnpConsequences.get(previousRsNumber); if (snpConsequences == null) { snpConsequences = new HashSet<String>(); pendingSnpConsequences.put(previousRsNumber, snpConsequences); } snpConsequences.addAll(consequenceIdentifiers); if (!storedSnpIds.containsKey(previousRsNumber)) { storedSnpIds.put(previousRsNumber, storedSnpId); storedSnpItemIdentifiers.put(previousRsNumber, currentSnp.getIdentifier()); } } // START NEW SNP previousRsNumber = rsNumber; seenLocsForSnp = new HashSet<String>(); consequenceIdentifiers = new HashSet<String>(); storeSnp = true; // map weight is the number of chromosome locations for the SNP, in practice there // are sometimes fewer locations than the map_weight indicates int mapWeight = res.getInt("map_weight"); boolean uniqueLocation = (mapWeight == 1) ? 
true : false; previousUniqueLocation = uniqueLocation; // if not a unique location and we've seen the SNP before, don't store if (!uniqueLocation && pendingSnpConsequences.containsKey(rsNumber)) { storeSnp = false; currentSnpIdentifier = storedSnpItemIdentifiers.get(rsNumber); } if (storeSnp) { currentSnp = createItem("SNP"); currentSnp.setAttribute("primaryIdentifier", rsNumber); currentSnp.setReference("organism", getOrganismItem(taxonId)); currentSnp.setAttribute("uniqueLocation", "" + uniqueLocation); currentSnpIdentifier = currentSnp.getIdentifier(); currentVariationId = res.getInt("variation_id"); String alleles = res.getString("allele_string"); if (!StringUtils.isBlank(alleles)) { currentSnp.setAttribute("alleles", alleles); } String type = determineType(alleles); if (type != null) { currentSnp.setAttribute("type", type); } // CHROMOSOME AND LOCATION // if SNP is mapped to multiple locations don't set chromosome and // chromosomeLocation references int start = res.getInt("seq_region_start"); int end = res.getInt("seq_region_end"); int chrStrand = res.getInt("seq_region_strand"); int chrStart = Math.min(start, end); int chrEnd = Math.max(start, end); Item loc = createItem("Location"); loc.setAttribute("start", "" + chrStart); loc.setAttribute("end", "" + chrEnd); loc.setAttribute("strand", "" + chrStrand); loc.setReference("locatedOn", getChromosome(chrName, taxonId)); loc.setReference("feature", currentSnpIdentifier); store(loc); // if mapWeight is 1 there is only one chromosome location, so set shortcuts if (uniqueLocation) { currentSnp.setReference("chromosome", getChromosome(chrName, taxonId)); currentSnp.setReference("chromosomeLocation", loc); } seenLocsForSnp.add(chrName + ":" + chrStart); // SOURCE String source = res.getString("s.name"); currentSnp.setReference("source", getSourceIdentifier(source)); // VALIDATION STATES String validationStatus = res.getString("validation_status"); List<String> validationStates = 
getValidationStateCollection(validationStatus); if (!validationStates.isEmpty()) { currentSnp.setCollection("validations", validationStates); } } } int mapWeight = res.getInt("map_weight"); boolean uniqueLocation = (mapWeight == 1) ? true : false; // we're on the same SNP but maybe a new location int start = res.getInt("seq_region_start"); int end = res.getInt("seq_region_end"); int strand = res.getInt("seq_region_strand"); int chrStart = Math.min(start, end); int chrEnd = Math.max(start, end); if (currentSnp == null) { LOG.error("currentSNP is null. vf.variation_feature_id: " + res.getString("variation_feature_id") + " rsNumber: " + rsNumber + " previousRsNumber: " + previousRsNumber + " storeSnp: " + storeSnp); } String chrLocStr = chrName + ":" + chrStart; if (!seenLocsForSnp.contains(chrLocStr)) { seenLocsForSnp.add(chrLocStr); // if this location is on a chromosome we want, store it Item loc = createItem("Location"); loc.setAttribute("start", "" + chrStart); loc.setAttribute("end", "" + chrEnd); loc.setAttribute("strand", "" + strand); loc.setReference("feature", currentSnpIdentifier); loc.setReference("locatedOn", getChromosome(chrName, taxonId)); store(loc); } // CONSEQUENCE TYPES // for SNPs without a uniqueLocation there will be different consequences at each one. 
// some consequences will need to stored at the end String type = res.getString("tv.consequence_types"); // Seen one example so far where consequence type is an empty string if (StringUtils.isBlank(type)) { type = "UNKOWN"; } String transcriptStableId = res.getString("feature_stable_id"); Item consequenceItem = createItem("Consequence"); consequenceItem.setAttribute("description", type); for (String individualType : type.split(",")) { consequenceItem.addToCollection("types", getConsequenceType(individualType.trim())); } setAttIfValue(consequenceItem, "peptideAlleles", res.getString("pep_allele_string")); setAttIfValue(consequenceItem, "siftPrediction", res.getString("sift_prediction")); setAttIfValue(consequenceItem, "siftScore", res.getString("sift_score")); setAttIfValue(consequenceItem, "polyphenPrediction", res.getString("polyphen_prediction")); setAttIfValue(consequenceItem, "polyphenScore", res.getString("polyphen_score")); if (!StringUtils.isBlank(transcriptStableId)) { consequenceItem.setReference("transcript", getTranscriptIdentifier(transcriptStableId)); } consequenceIdentifiers.add(consequenceItem.getIdentifier()); store(consequenceItem); if (counter % 100000 == 0) { LOG.info("Read " + counter + " rows total, stored " + snpCounter + " SNPs. 
for chr " + chrName); } } if (currentSnp != null && storeSnp) { Integer storedSnpId = store(currentSnp); variationIdToItemIdentifier.put(currentVariationId, currentSnp.getIdentifier()); if (!storedSnpIds.containsKey(storedSnpId)) { storeSnpCollections(storedSnpId, consequenceIdentifiers); } } LOG.info("Finished " + counter + " rows total, stored " + snpCounter + " SNPs for chr " + chrName); LOG.info("variationIdToItemIdentifier.size() = " + variationIdToItemIdentifier.size()); } private void setAttIfValue(Item item, String attName, String attValue) { if (!StringUtils.isBlank(attValue)) { item.setAttribute(attName, attValue); } } private String getConsequenceType(String type) throws ObjectStoreException { if (!consequenceTypes.containsKey(type)) { Item consequenceType = createItem("ConsequenceType"); consequenceType.setAttribute("type", type); store(consequenceType); consequenceTypes.put(type, consequenceType.getIdentifier()); } return consequenceTypes.get(type); } // This has to be called after process() called for the chromosome because it needs // variationIdToItemIdentifier to be populated. 
private void createSynonyms(Connection connection, String chrName) throws SQLException, ObjectStoreException { ResultSet res = querySynonyms(connection, chrName); int synonymCounter = 0; while (res.next()) { Integer variationId = res.getInt("variation_id"); String synonym = res.getString("name"); if (!StringUtils.isBlank(synonym)) { synonymCounter++; createSynonym(variationIdToItemIdentifier.get(variationId), synonym, true); } } LOG.info("Created " + synonymCounter + " synonyms for chr " + chrName); } private void processGenotypes(Connection connection) throws Exception { // query for strains ResultSet res = queryStrains(connection); int strainCounter = 0; while (res.next()) { Integer strainId = res.getInt("sample_id"); String strainName = res.getString("name"); Item strain = createItem("Strain"); strain.setAttribute("name", strainName); store(strain); // for each strain query and store genotypes processGenotypesForStrain(connection, strainId, strain.getIdentifier()); strainCounter++; if (strainCounter >= 100) { break; } } } private void processGenotypesForStrain(Connection connection, Integer strainId, String strainIdentifier) throws Exception { ResultSet res = queryGenotypesForStrain(connection, strainId); int snpReferenceCount = 0; int ignoredCount = 0; while (res.next()) { Integer variationId = res.getInt("variation_id"); String allele1 = res.getString("allele_1"); String allele2 = res.getString("allele_2"); String snpItemIdentifier = variationIdToItemIdentifier.get(variationId); Item genotype = createItem("Genotype"); genotype.setAttribute("allele1", allele1); genotype.setAttribute("allele2", allele2); if (snpItemIdentifier != null) { genotype.setReference("snp", snpItemIdentifier); snpReferenceCount++; } else { ignoredCount++; } genotype.setReference("strain", strainIdentifier); store(genotype); } String message = "For strain " + strainId + " snp ref: " + snpReferenceCount + ", no ref: " + ignoredCount; LOG.info(message); System.out.println(message); } 
private ResultSet queryGenotypesForStrain(Connection connection, Integer strainId) throws SQLException{ String query = "SELECT variation_id, allele_1, allele_2" + " FROM tmp_individual_genotype_single_bp" + " WHERE sample_id = " + strainId; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private void storeSnpCollections(Integer storedSnpId, Set<String> consequenceIdentifiers) throws ObjectStoreException { if (!consequenceIdentifiers.isEmpty()) { ReferenceList col = new ReferenceList("consequences", new ArrayList<String>(consequenceIdentifiers)); store(col, storedSnpId); } } /** * Given an allele string read from the database determine the type of variation, e.g. snp, * in-del, etc. This is a re-implementation of code from the Ensembl perl API, see: * http://www.ensembl.org/info/docs/Pdoc/ensembl-variation/ * modules/Bio/EnsEMBL/Variation/Utils/Sequence.html#CODE4 * @param alleleStr the alleles to determine the type for * @return a variation class or null if none can be determined */ protected String determineType(String alleleStr) { String type = null; final String VALID_BASES = "ATUGCYRSWKMBDHVN"; alleleStr = alleleStr.toUpperCase(); if (!StringUtils.isBlank(alleleStr)) { // snp if e.g. 
A/C or A|C if (alleleStr.matches("^[" + VALID_BASES + "]([\\/\\|\\\\][" + VALID_BASES + "])+$")) { type = "snp"; } else if ("CNV".equals(alleleStr)) { type = alleleStr.toLowerCase(); } else if ("CNV_PROBE".equals(alleleStr)) { type = "cnv probe"; } else if ("HGMD_MUTATION".equals(alleleStr)) { type = alleleStr.toLowerCase(); } else { String[] alleles = alleleStr.split("[\\|\\/\\\\]"); if (alleles.length == 1) { type = "het"; } else if (alleles.length == 2) { if ((StringUtils.containsOnly(alleles[0], VALID_BASES) && "-".equals(alleles[1])) || (StringUtils.containsOnly(alleles[1], VALID_BASES) && "-".equals(alleles[0]))) { type = "in-del"; } else if (containsOneOf(alleles[0], "LARGE", "INS", "DEL") || containsOneOf(alleles[1], "LARGE", "INS", "DEL")) { type = "named"; } else if ((StringUtils.containsOnly(alleles[0], VALID_BASES) && alleles[0].length() > 1) || (StringUtils.containsOnly(alleles[1], VALID_BASES) && alleles[1].length() > 1)) { // AA/GC 2 alleles type = "substitution"; } } else if (alleles.length > 2) { if (containsDigit(alleles[0])) { type = "microsat"; } else if (anyContainChar(alleles, "-")) { type = "mixed"; } } if (type == null) { LOG.warn("Failed to work out allele type for: " + alleleStr); } } } return type; } private String getSourceIdentifier(String name) throws ObjectStoreException { String sourceIdentifier = sources.get(name); if (sourceIdentifier == null) { Item source = createItem("Source"); source.setAttribute("name", name); store(source); sourceIdentifier = source.getIdentifier(); sources.put(name, sourceIdentifier); } return sourceIdentifier; } private String getTranscriptIdentifier(String transcriptStableId) throws ObjectStoreException { String transcriptIdentifier = transcripts.get(transcriptStableId); if (transcriptIdentifier == null) { Item transcript = createItem("Transcript"); transcript.setAttribute("primaryIdentifier", transcriptStableId); store(transcript); transcriptIdentifier = transcript.getIdentifier(); 
transcripts.put(transcriptStableId, transcriptIdentifier); } return transcriptIdentifier; } private List<String> getValidationStateCollection(String input) throws ObjectStoreException { List<String> stateIdentifiers = new ArrayList<String>(); if (!StringUtils.isBlank(input)) { for (String state : input.split(",")) { stateIdentifiers.add(getStateIdentifier(state)); } } return stateIdentifiers; } private String getStateIdentifier(String name) throws ObjectStoreException { String stateIdentifier = states.get(name); if (stateIdentifier == null) { Item state = createItem("ValidationState"); state.setAttribute("name", name); store(state); stateIdentifier = state.getIdentifier(); states.put(name, stateIdentifier); } return stateIdentifier; } private ResultSet queryVariation(Connection connection, String chrName) throws SQLException { String query = "SELECT vf.variation_feature_id, vf.variation_name, vf.variation_id," + " vf.allele_string, sr.name," + " vf.map_weight, vf.seq_region_start, vf.seq_region_end, vf.seq_region_strand, " + " s.name," + " vf.validation_status," + " vf.consequence_type," + " tv.cdna_start,tv.consequence_types,tv.pep_allele_string,tv.feature_stable_id," + " tv.sift_prediction, tv.sift_score, tv.polyphen_prediction, tv.polyphen_score" + " FROM seq_region sr, source s, variation_feature vf " + " LEFT JOIN (transcript_variation tv)" + " ON (vf.variation_feature_id = tv.variation_feature_id" + " AND tv.cdna_start is not null)" + " WHERE vf.seq_region_id = sr.seq_region_id" + " AND vf.source_id = s.source_id" + " AND sr.name = '" + chrName + "'" + " ORDER BY vf.variation_id"; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private ResultSet querySynonyms(Connection connection, String chrName) throws SQLException { String query = "SELECT vs.variation_id, vs.name" + " FROM variation_synonym vs, variation_feature vf, seq_region sr" + " WHERE vs.variation_id = 
vf.variation_id" + " AND vf.seq_region_id = sr.seq_region_id" + " AND sr.name = '" + chrName + "'" + " AND vs.source_id IN (" + StringUtil.join(getSnpSourceIds(connection), ",") + ")" + " ORDER BY vs.variation_id"; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } private Set<String> getSnpSourceIds(Connection connection) throws SQLException { if (snpSourceIds == null) { snpSourceIds = new HashSet<String>(); String sql = "SELECT source_id FROM source"; if (snpSources != null && !snpSources.isEmpty()) { sql += " WHERE name IN (" + makeInList(snpSources) + ")"; } Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(sql); while (res.next()) { snpSourceIds.add(res.getString("source_id")); } if (snpSourceIds.isEmpty()) { throw new RuntimeException("Failed to retrieve source_ids for dbSNP source"); } } return snpSourceIds; } private String makeInList(Collection<String> strings) { Set<String> quoted = new HashSet<String>(); for (String s : strings) { quoted.add("\"" + s + "\""); } return StringUtil.join(quoted, ","); } private ResultSet queryStrains(Connection connection) throws SQLException { String query = "SELECT sample_id, name from sample"; LOG.warn(query); System.out.println(query); Statement stmt = connection.createStatement(); ResultSet res = stmt.executeQuery(query); return res; } /** * {@inheritDoc} */ @Override public String getDataSetTitle(int taxonId) { return DATASET_TITLE; } private boolean containsOneOf(String target, String... 
substrings) { for (String substring : substrings) { if (target.contains(substring)) { return true; } } return false; } private boolean anyContainChar(String[] targets, String substring) { for (String target : targets) { if (target.contains(substring)) { return true; } } return false; } private boolean containsDigit(String target) { for (int i = 0; i < target.length(); i++) { if (Character.isDigit(target.charAt(i))) { return true; } } return false; } }
Restore all chromosomes reading SNPs.
bio/sources/ensembl-snp-db/main/src/org/intermine/bio/dataconversion/EnsemblSnpDbConverter.java
Restore all chromosomes reading SNPs.
Java
apache-2.0
a1d72ef6adb3540b88695f12b995219dd2758f75
0
cniesen/rice,bhutchinson/rice,ewestfal/rice-svn2git-test,UniversityOfHawaiiORS/rice,ewestfal/rice,gathreya/rice-kc,kuali/kc-rice,rojlarge/rice-kc,sonamuthu/rice-1,UniversityOfHawaiiORS/rice,jwillia/kc-rice1,gathreya/rice-kc,smith750/rice,ewestfal/rice,bhutchinson/rice,shahess/rice,shahess/rice,kuali/kc-rice,shahess/rice,geothomasp/kualico-rice-kc,smith750/rice,cniesen/rice,geothomasp/kualico-rice-kc,ewestfal/rice,kuali/kc-rice,smith750/rice,bsmith83/rice-1,rojlarge/rice-kc,UniversityOfHawaiiORS/rice,geothomasp/kualico-rice-kc,smith750/rice,ewestfal/rice-svn2git-test,sonamuthu/rice-1,bhutchinson/rice,ewestfal/rice-svn2git-test,bsmith83/rice-1,rojlarge/rice-kc,geothomasp/kualico-rice-kc,bhutchinson/rice,sonamuthu/rice-1,UniversityOfHawaiiORS/rice,bsmith83/rice-1,ewestfal/rice-svn2git-test,ewestfal/rice,gathreya/rice-kc,shahess/rice,cniesen/rice,shahess/rice,cniesen/rice,UniversityOfHawaiiORS/rice,bsmith83/rice-1,jwillia/kc-rice1,cniesen/rice,gathreya/rice-kc,kuali/kc-rice,jwillia/kc-rice1,sonamuthu/rice-1,ewestfal/rice,kuali/kc-rice,geothomasp/kualico-rice-kc,rojlarge/rice-kc,smith750/rice,rojlarge/rice-kc,bhutchinson/rice,gathreya/rice-kc,jwillia/kc-rice1,jwillia/kc-rice1
/* * Copyright 2005-2007 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kns.web.struts.action; import java.util.HashMap; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.kuali.rice.core.util.RiceConstants; import org.kuali.rice.kim.bo.Person; import org.kuali.rice.kns.authorization.AuthorizationConstants; import org.kuali.rice.kns.document.Copyable; import org.kuali.rice.kns.document.Document; import org.kuali.rice.kns.document.authorization.TransactionalDocumentAuthorizer; import org.kuali.rice.kns.document.authorization.TransactionalDocumentPresentationController; import org.kuali.rice.kns.exception.DocumentAuthorizationException; import org.kuali.rice.kns.service.KNSServiceLocator; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.kns.util.KNSConstants; import org.kuali.rice.kns.web.struts.form.KualiDocumentFormBase; import org.kuali.rice.kns.web.struts.form.KualiTransactionalDocumentFormBase; /** * This class handles UI actions for all shared methods of transactional documents. 
*/ public class KualiTransactionalDocumentActionBase extends KualiDocumentActionBase { private static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(KualiTransactionalDocumentActionBase.class); /** * Method that will take the current document and call its copy method if Copyable. * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward copy(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { KualiTransactionalDocumentFormBase tmpForm = (KualiTransactionalDocumentFormBase) form; Document document = tmpForm.getDocument(); if (!tmpForm.getDocumentActions().containsKey(KNSConstants.KUALI_ACTION_CAN_COPY)) { throw buildAuthorizationException("copy", document); } ((Copyable) tmpForm.getTransactionalDocument()).toCopy(); return mapping.findForward(RiceConstants.MAPPING_BASIC); } protected void populateAuthorizationFields(KualiDocumentFormBase formBase){ super.populateAuthorizationFields(formBase); Document document = formBase.getDocument(); Map editMode = new HashMap(); if (formBase.isFormDocumentInitialized()) { Person user = GlobalVariables.getUserSession().getPerson(); TransactionalDocumentPresentationController documentPresentationController = (TransactionalDocumentPresentationController) KNSServiceLocator.getDocumentPresentationControllerService().getDocumentPresentationController(document); TransactionalDocumentAuthorizer documentAuthorizer = (TransactionalDocumentAuthorizer) KNSServiceLocator.getDocumentAuthorizationService().getDocumentAuthorizer(document); Set<String> editModes = documentPresentationController.getEditModes(document); editModes = documentAuthorizer.getEditModes(document, user, editModes); editMode = this.convertSetToMap(editModes); if (KNSServiceLocator.getDataDictionaryService().getDataDictionary().getDocumentEntry(document.getClass().getName()).getUsePessimisticLocking()) { editMode = 
KNSServiceLocator.getDocumentPessimisticLockerService().establishLocks(document, editMode, user); } } if(formBase.getDocumentActions().containsKey(KNSConstants.KUALI_ACTION_CAN_EDIT)){ editMode.put(AuthorizationConstants.EditMode.FULL_ENTRY, KNSConstants.KUALI_DEFAULT_TRUE_VALUE); } else editMode.put(AuthorizationConstants.EditMode.VIEW_ONLY, KNSConstants.KUALI_DEFAULT_TRUE_VALUE); //having a problem empty/readonly formBase.setEditingMode(editMode); if (formBase.getEditingMode().containsKey(AuthorizationConstants.EditMode.UNVIEWABLE)) { throw new DocumentAuthorizationException(GlobalVariables.getUserSession().getPerson().getName(), "view", document.getDocumentHeader().getDocumentNumber()); } } }
impl/src/main/java/org/kuali/rice/kns/web/struts/action/KualiTransactionalDocumentActionBase.java
/* * Copyright 2005-2007 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kns.web.struts.action; import java.util.HashMap; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.kuali.rice.core.util.RiceConstants; import org.kuali.rice.kim.bo.Person; import org.kuali.rice.kns.authorization.AuthorizationConstants; import org.kuali.rice.kns.document.Copyable; import org.kuali.rice.kns.document.Document; import org.kuali.rice.kns.document.authorization.TransactionalDocumentAuthorizer; import org.kuali.rice.kns.document.authorization.TransactionalDocumentPresentationController; import org.kuali.rice.kns.exception.DocumentAuthorizationException; import org.kuali.rice.kns.service.KNSServiceLocator; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.kns.util.KNSConstants; import org.kuali.rice.kns.web.struts.form.KualiDocumentFormBase; import org.kuali.rice.kns.web.struts.form.KualiTransactionalDocumentFormBase; /** * This class handles UI actions for all shared methods of transactional documents. 
*/ public class KualiTransactionalDocumentActionBase extends KualiDocumentActionBase { private static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(KualiTransactionalDocumentActionBase.class); /** * Method that will take the current document and call its copy method if Copyable. * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward copy(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { KualiTransactionalDocumentFormBase tmpForm = (KualiTransactionalDocumentFormBase) form; Document document = tmpForm.getDocument(); if (!tmpForm.getDocumentActions().containsKey(KNSConstants.KUALI_ACTION_CAN_COPY)) { throw buildAuthorizationException("copy", document); } ((Copyable) tmpForm.getTransactionalDocument()).toCopy(); return mapping.findForward(RiceConstants.MAPPING_BASIC); } protected void populateAuthorizationFields(KualiDocumentFormBase formBase){ super.populateAuthorizationFields(formBase); Document document = formBase.getDocument(); Map editMode = new HashMap(); if (formBase.isFormDocumentInitialized()) { Person user = GlobalVariables.getUserSession().getPerson(); TransactionalDocumentPresentationController documentPresentationController = (TransactionalDocumentPresentationController) KNSServiceLocator.getDocumentPresentationControllerService().getDocumentPresentationController(document); TransactionalDocumentAuthorizer documentAuthorizer = (TransactionalDocumentAuthorizer) KNSServiceLocator.getDocumentAuthorizationService().getDocumentAuthorizer(document); Set<String> editModes = documentPresentationController.getEditModes(document); editModes = documentAuthorizer.getEditMode(document, user, editModes); editMode = this.convertSetToMap(editModes); if (KNSServiceLocator.getDataDictionaryService().getDataDictionary().getDocumentEntry(document.getClass().getName()).getUsePessimisticLocking()) { editMode = 
KNSServiceLocator.getDocumentPessimisticLockerService().establishLocks(document, editMode, user); } } if(formBase.getDocumentActions().containsKey(KNSConstants.KUALI_ACTION_CAN_EDIT)){ editMode.put(AuthorizationConstants.EditMode.FULL_ENTRY, KNSConstants.KUALI_DEFAULT_TRUE_VALUE); } else editMode.put(AuthorizationConstants.EditMode.VIEW_ONLY, KNSConstants.KUALI_DEFAULT_TRUE_VALUE); //having a problem empty/readonly formBase.setEditingMode(editMode); if (formBase.getEditingMode().containsKey(AuthorizationConstants.EditMode.UNVIEWABLE)) { throw new DocumentAuthorizationException(GlobalVariables.getUserSession().getPerson().getName(), "view", document.getDocumentHeader().getDocumentNumber()); } } }
KFSMI-2125
impl/src/main/java/org/kuali/rice/kns/web/struts/action/KualiTransactionalDocumentActionBase.java
KFSMI-2125
Java
apache-2.0
628e84a87d2203daad7cdb638f1229bf297b36c5
0
EnMasseProject/enmasse,jenmalloy/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse
/* * Copyright 2016 Red Hat Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package enmasse.mqtt; import io.vertx.core.AbstractVerticle; import io.vertx.core.Future; import io.vertx.core.net.PemKeyCertOptions; import io.vertx.mqtt.MqttEndpoint; import io.vertx.mqtt.MqttServer; import io.vertx.mqtt.MqttServerOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.util.HashMap; import java.util.Map; /** * Vert.x based MQTT gateway for EnMasse */ @Component public class MqttGateway extends AbstractVerticle { private static final Logger LOG = LoggerFactory.getLogger(MqttGateway.class); // binding info for listening private String bindAddress; private int listenPort; // connection info to the messaging service private String messagingServiceHost; private int messagingServicePort; // SSL/TLS support stuff private boolean ssl; private String certFile; private String keyFile; private MqttServer server; private Map<String, AmqpBridge> bridges; /** * Set the IP address the MQTT gateway will bind to * * @param bindAddress the IP address * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.bindaddress:0.0.0.0}") public MqttGateway setBindAddress(String bindAddress) { this.bindAddress = bindAddress; return this; } /** * Set the port the MQTT gateway will listen on for MQTT connections. 
* * @param listePort the port to listen on * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.listenport:1883}") public MqttGateway setListenPort(int listePort) { this.listenPort = listePort; return this; } /** * Set the address for connecting to the AMQP services * * @param messagingServiceHost address for AMQP connections * @return current MQTT gateway instance */ @Value(value = "${messaging.service.host:0.0.0.0}") public MqttGateway setMessagingServiceHost(String messagingServiceHost) { this.messagingServiceHost = messagingServiceHost; return this; } /** * Set the port for connecting to the AMQP services * * @param messagingServicePort port for AMQP connections * @return current MQTT gateway instance */ @Value(value = "${messaging.service.port:5672}") public MqttGateway setMessagingServicePort(int messagingServicePort) { this.messagingServicePort = messagingServicePort; return this; } /** * Set the SSL/TLS support needed for the MQTT connections * * @param ssl SSL/TLS is needed * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.ssl:false}") public MqttGateway setSsl(boolean ssl) { this.ssl = ssl; return this; } /** * Set the server certificate file path for SSL/TLS support * * @param certFile server certificate file path * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.certfile:./src/test/resources/tls/server-cert.pem}") public MqttGateway setCertFile(String certFile) { this.certFile = certFile; return this; } /** * Set the server private key file path for SSL/TLS support * * @param keyFile server private key file path * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.keyfile:./src/test/resources/tls/server-key.pem}") public MqttGateway setKeyFile(String keyFile) { this.keyFile = keyFile; return this; } /** * Start the MQTT server component * * @param startFuture */ private void bindMqttServer(Future<Void> startFuture) { MqttServerOptions options = new 
MqttServerOptions(); options.setHost(this.bindAddress).setPort(this.listenPort); if (this.ssl) { PemKeyCertOptions pemKeyCertOptions = new PemKeyCertOptions() .setKeyPath(this.keyFile) .setCertPath(this.certFile); options.setKeyCertOptions(pemKeyCertOptions) .setSsl(this.ssl); LOG.info("SSL/TLS support enabled key {} cert {}", this.keyFile, this.certFile); } this.server = MqttServer.create(this.vertx, options); this.server .endpointHandler(this::handleMqttEndpointConnection) .listen(done -> { if (done.succeeded()) { this.bridges = new HashMap<>(); LOG.info("MQTT gateway running on {}:{}", this.bindAddress, this.server.actualPort()); startFuture.complete(); } else { LOG.error("Error while starting up MQTT gateway", done.cause()); startFuture.fail(done.cause()); } }); } /** * Handler for a connection request (CONNECT) received by a remote MQTT client * * @param mqttEndpoint MQTT local endpoint */ private void handleMqttEndpointConnection(MqttEndpoint mqttEndpoint) { LOG.info("CONNECT from MQTT client {}", mqttEndpoint.clientIdentifier()); AmqpBridge bridge = new AmqpBridge(this.vertx, mqttEndpoint); bridge.mqttEndpointCloseHandler(amqpBridge -> { this.bridges.remove(amqpBridge.id()); amqpBridge.close(); LOG.info("Closed AMQP bridge for client {}", amqpBridge.id()); }).open(this.messagingServiceHost, this.messagingServicePort, done -> { if (done.succeeded()) { LOG.info("Opened AMQP bridge for client {}", done.result().id()); this.bridges.put(done.result().id(), done.result()); } else { LOG.info("Error opening the AMQP bridge ...", done.cause()); } }); } @Override public void start(Future<Void> startFuture) throws Exception { LOG.info("Starting MQTT gateway verticle..."); this.bindMqttServer(startFuture); } @Override public void stop(Future<Void> stopFuture) throws Exception { LOG.info("Stopping MQTT gateway verticle ..."); Future<Void> shutdownTracker = Future.future(); shutdownTracker.setHandler(done -> { if (done.succeeded()) { LOG.info("MQTT gateway has been shut 
down successfully"); stopFuture.complete(); } else { LOG.info("Error while shutting down MQTT gateway", done.cause()); stopFuture.fail(done.cause()); } }); if (this.server != null) { this.bridges.entrySet().stream().forEach(entry -> { entry.getValue().close(); }); this.server.close(shutdownTracker.completer()); } else { shutdownTracker.complete(); } } }
src/main/java/enmasse/mqtt/MqttGateway.java
/* * Copyright 2016 Red Hat Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package enmasse.mqtt; import io.vertx.core.AbstractVerticle; import io.vertx.core.Future; import io.vertx.core.net.PemKeyCertOptions; import io.vertx.mqtt.MqttEndpoint; import io.vertx.mqtt.MqttServer; import io.vertx.mqtt.MqttServerOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.util.HashMap; import java.util.Map; /** * Vert.x based MQTT gateway for EnMasse */ @Component public class MqttGateway extends AbstractVerticle { private static final Logger LOG = LoggerFactory.getLogger(MqttGateway.class); // binding info for listening private String bindAddress; private int listenPort; // connection info to the messaging service private String messagingServiceHost; private int messagingServicePort; // SSL/TLS support stuff private boolean ssl; private String certFile; private String keyFile; private MqttServer server; private Map<String, AmqpBridge> bridges; /** * Set the IP address the MQTT gateway will bind to * * @param bindAddress the IP address * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.bindaddress:0.0.0.0}") public MqttGateway setBindAddress(String bindAddress) { this.bindAddress = bindAddress; return this; } /** * Set the port the MQTT gateway will listen on for MQTT connections. 
* * @param listePort the port to listen on * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.listenport:1883}") public MqttGateway setListenPort(int listePort) { this.listenPort = listePort; return this; } /** * Set the address for connecting to the AMQP services * * @param messagingServiceHost address for AMQP connections * @return current MQTT gateway instance */ @Value(value = "${messaging.service.host:0.0.0.0}") public MqttGateway setMessagingServiceHost(String messagingServiceHost) { this.messagingServiceHost = messagingServiceHost; return this; } /** * Set the port for connecting to the AMQP services * * @param messagingServicePort port for AMQP connections * @return current MQTT gateway instance */ @Value(value = "${messaging.service.port:5672}") public MqttGateway setMessagingServicePort(int messagingServicePort) { this.messagingServicePort = messagingServicePort; return this; } /** * Set the SSL/TLS support needed for the MQTT connections * * @param ssl SSL/TLS is needed * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.ssl:false}") public MqttGateway setSsl(boolean ssl) { this.ssl = ssl; return this; } /** * Set the server certificate file path for SSL/TLS support * * @param certFile server certificate file path * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.certfile:./src/test/resources/tls/server-key.pem}") public MqttGateway setCertFile(String certFile) { this.certFile = certFile; return this; } /** * Set the server private key file path for SSL/TLS support * * @param keyFile server private key file path * @return current MQTT gateway instance */ @Value(value = "${enmasse.mqtt.keyfile:./src/test/resources/tls/server-cert.pem}") public MqttGateway setKeyFile(String keyFile) { this.keyFile = keyFile; return this; } /** * Start the MQTT server component * * @param startFuture */ private void bindMqttServer(Future<Void> startFuture) { MqttServerOptions options = new 
MqttServerOptions(); options.setHost(this.bindAddress).setPort(this.listenPort); if (this.ssl) { PemKeyCertOptions pemKeyCertOptions = new PemKeyCertOptions() .setKeyPath(this.keyFile) .setCertPath(this.certFile); options.setKeyCertOptions(pemKeyCertOptions) .setSsl(this.ssl); LOG.info("SSL/TLS support enabled key {} cert {}", this.keyFile, this.certFile); } this.server = MqttServer.create(this.vertx, options); this.server .endpointHandler(this::handleMqttEndpointConnection) .listen(done -> { if (done.succeeded()) { this.bridges = new HashMap<>(); LOG.info("MQTT gateway running on {}:{}", this.bindAddress, this.server.actualPort()); startFuture.complete(); } else { LOG.error("Error while starting up MQTT gateway", done.cause()); startFuture.fail(done.cause()); } }); } /** * Handler for a connection request (CONNECT) received by a remote MQTT client * * @param mqttEndpoint MQTT local endpoint */ private void handleMqttEndpointConnection(MqttEndpoint mqttEndpoint) { LOG.info("CONNECT from MQTT client {}", mqttEndpoint.clientIdentifier()); AmqpBridge bridge = new AmqpBridge(this.vertx, mqttEndpoint); bridge.mqttEndpointCloseHandler(amqpBridge -> { this.bridges.remove(amqpBridge.id()); amqpBridge.close(); LOG.info("Closed AMQP bridge for client {}", amqpBridge.id()); }).open(this.messagingServiceHost, this.messagingServicePort, done -> { if (done.succeeded()) { LOG.info("Opened AMQP bridge for client {}", done.result().id()); this.bridges.put(done.result().id(), done.result()); } else { LOG.info("Error opening the AMQP bridge ...", done.cause()); } }); } @Override public void start(Future<Void> startFuture) throws Exception { LOG.info("Starting MQTT gateway verticle..."); this.bindMqttServer(startFuture); } @Override public void stop(Future<Void> stopFuture) throws Exception { LOG.info("Stopping MQTT gateway verticle ..."); Future<Void> shutdownTracker = Future.future(); shutdownTracker.setHandler(done -> { if (done.succeeded()) { LOG.info("MQTT gateway has been shut 
down successfully"); stopFuture.complete(); } else { LOG.info("Error while shutting down MQTT gateway", done.cause()); stopFuture.fail(done.cause()); } }); if (this.server != null) { this.bridges.entrySet().stream().forEach(entry -> { entry.getValue().close(); }); this.server.close(shutdownTracker.completer()); } else { shutdownTracker.complete(); } } }
Use correct defaults for key and cert
src/main/java/enmasse/mqtt/MqttGateway.java
Use correct defaults for key and cert
Java
apache-2.0
f273981f37ed02158de818f510c7826e13cfe592
0
Stacey-Gammon/elasticsearch,markwalkom/elasticsearch,markwalkom/elasticsearch,robin13/elasticsearch,geidies/elasticsearch,scottsom/elasticsearch,IanvsPoplicola/elasticsearch,mortonsykes/elasticsearch,njlawton/elasticsearch,sneivandt/elasticsearch,C-Bish/elasticsearch,geidies/elasticsearch,i-am-Nathan/elasticsearch,rlugojr/elasticsearch,mohit/elasticsearch,glefloch/elasticsearch,a2lin/elasticsearch,robin13/elasticsearch,StefanGor/elasticsearch,gingerwizard/elasticsearch,mjason3/elasticsearch,naveenhooda2000/elasticsearch,kalimatas/elasticsearch,LeoYao/elasticsearch,yanjunh/elasticsearch,mjason3/elasticsearch,dongjoon-hyun/elasticsearch,masaruh/elasticsearch,obourgain/elasticsearch,awislowski/elasticsearch,umeshdangat/elasticsearch,rlugojr/elasticsearch,scorpionvicky/elasticsearch,ThiagoGarciaAlves/elasticsearch,fred84/elasticsearch,gingerwizard/elasticsearch,yanjunh/elasticsearch,ricardocerq/elasticsearch,s1monw/elasticsearch,maddin2016/elasticsearch,sneivandt/elasticsearch,pozhidaevak/elasticsearch,ThiagoGarciaAlves/elasticsearch,sneivandt/elasticsearch,robin13/elasticsearch,liweinan0423/elasticsearch,gfyoung/elasticsearch,strapdata/elassandra,s1monw/elasticsearch,fforbeck/elasticsearch,Shepard1212/elasticsearch,wenpos/elasticsearch,C-Bish/elasticsearch,pozhidaevak/elasticsearch,nknize/elasticsearch,wangtuo/elasticsearch,shreejay/elasticsearch,strapdata/elassandra,spiegela/elasticsearch,jprante/elasticsearch,jprante/elasticsearch,markwalkom/elasticsearch,alexshadow007/elasticsearch,mjason3/elasticsearch,brandonkearby/elasticsearch,HonzaKral/elasticsearch,HonzaKral/elasticsearch,qwerty4030/elasticsearch,yanjunh/elasticsearch,scorpionvicky/elasticsearch,ricardocerq/elasticsearch,yanjunh/elasticsearch,girirajsharma/elasticsearch,umeshdangat/elasticsearch,uschindler/elasticsearch,Stacey-Gammon/elasticsearch,Helen-Zhao/elasticsearch,mikemccand/elasticsearch,girirajsharma/elasticsearch,glefloch/elasticsearch,coding0011/elasticsearch,strapdata/elassandra,alexshadow007/elas
ticsearch,gfyoung/elasticsearch,a2lin/elasticsearch,JervyShi/elasticsearch,ZTE-PaaS/elasticsearch,fred84/elasticsearch,shreejay/elasticsearch,elasticdog/elasticsearch,scottsom/elasticsearch,shreejay/elasticsearch,a2lin/elasticsearch,kalimatas/elasticsearch,nazarewk/elasticsearch,JackyMai/elasticsearch,Shepard1212/elasticsearch,MisterAndersen/elasticsearch,obourgain/elasticsearch,geidies/elasticsearch,fred84/elasticsearch,Stacey-Gammon/elasticsearch,artnowo/elasticsearch,gfyoung/elasticsearch,nezirus/elasticsearch,qwerty4030/elasticsearch,vroyer/elasticassandra,gmarz/elasticsearch,obourgain/elasticsearch,GlenRSmith/elasticsearch,spiegela/elasticsearch,IanvsPoplicola/elasticsearch,wenpos/elasticsearch,ZTE-PaaS/elasticsearch,jprante/elasticsearch,uschindler/elasticsearch,gingerwizard/elasticsearch,JackyMai/elasticsearch,jimczi/elasticsearch,nezirus/elasticsearch,yanjunh/elasticsearch,coding0011/elasticsearch,markwalkom/elasticsearch,fernandozhu/elasticsearch,JSCooke/elasticsearch,girirajsharma/elasticsearch,Shepard1212/elasticsearch,henakamaMSFT/elasticsearch,henakamaMSFT/elasticsearch,qwerty4030/elasticsearch,lks21c/elasticsearch,zkidkid/elasticsearch,lks21c/elasticsearch,nilabhsagar/elasticsearch,vroyer/elassandra,LewayneNaidoo/elasticsearch,rajanm/elasticsearch,nazarewk/elasticsearch,a2lin/elasticsearch,JervyShi/elasticsearch,fernandozhu/elasticsearch,Helen-Zhao/elasticsearch,ricardocerq/elasticsearch,umeshdangat/elasticsearch,gingerwizard/elasticsearch,njlawton/elasticsearch,dongjoon-hyun/elasticsearch,pozhidaevak/elasticsearch,masaruh/elasticsearch,uschindler/elasticsearch,StefanGor/elasticsearch,gfyoung/elasticsearch,liweinan0423/elasticsearch,nezirus/elasticsearch,StefanGor/elasticsearch,nilabhsagar/elasticsearch,strapdata/elassandra5-rc,wuranbo/elasticsearch,jprante/elasticsearch,obourgain/elasticsearch,rlugojr/elasticsearch,s1monw/elasticsearch,winstonewert/elasticsearch,JSCooke/elasticsearch,MaineC/elasticsearch,henakamaMSFT/elasticsearch,mjason3/elasticsearc
h,masaruh/elasticsearch,nazarewk/elasticsearch,Stacey-Gammon/elasticsearch,ricardocerq/elasticsearch,scorpionvicky/elasticsearch,LeoYao/elasticsearch,gingerwizard/elasticsearch,alexshadow007/elasticsearch,LeoYao/elasticsearch,gfyoung/elasticsearch,LeoYao/elasticsearch,pozhidaevak/elasticsearch,strapdata/elassandra5-rc,robin13/elasticsearch,strapdata/elassandra,MaineC/elasticsearch,mjason3/elasticsearch,wenpos/elasticsearch,nezirus/elasticsearch,strapdata/elassandra,maddin2016/elasticsearch,JervyShi/elasticsearch,lks21c/elasticsearch,mohit/elasticsearch,naveenhooda2000/elasticsearch,a2lin/elasticsearch,mortonsykes/elasticsearch,zkidkid/elasticsearch,spiegela/elasticsearch,wangtuo/elasticsearch,nazarewk/elasticsearch,MisterAndersen/elasticsearch,winstonewert/elasticsearch,wangtuo/elasticsearch,JSCooke/elasticsearch,vroyer/elassandra,bawse/elasticsearch,mohit/elasticsearch,HonzaKral/elasticsearch,rajanm/elasticsearch,gmarz/elasticsearch,obourgain/elasticsearch,nknize/elasticsearch,gmarz/elasticsearch,IanvsPoplicola/elasticsearch,scottsom/elasticsearch,Stacey-Gammon/elasticsearch,JackyMai/elasticsearch,mohit/elasticsearch,nilabhsagar/elasticsearch,brandonkearby/elasticsearch,bawse/elasticsearch,GlenRSmith/elasticsearch,Helen-Zhao/elasticsearch,vroyer/elasticassandra,robin13/elasticsearch,JSCooke/elasticsearch,coding0011/elasticsearch,wangtuo/elasticsearch,JervyShi/elasticsearch,dongjoon-hyun/elasticsearch,nilabhsagar/elasticsearch,uschindler/elasticsearch,fred84/elasticsearch,i-am-Nathan/elasticsearch,JervyShi/elasticsearch,geidies/elasticsearch,lks21c/elasticsearch,nezirus/elasticsearch,glefloch/elasticsearch,qwerty4030/elasticsearch,LeoYao/elasticsearch,jimczi/elasticsearch,elasticdog/elasticsearch,coding0011/elasticsearch,artnowo/elasticsearch,awislowski/elasticsearch,scottsom/elasticsearch,brandonkearby/elasticsearch,wuranbo/elasticsearch,wenpos/elasticsearch,zkidkid/elasticsearch,zkidkid/elasticsearch,rajanm/elasticsearch,IanvsPoplicola/elasticsearch,bawse/elastics
earch,alexshadow007/elasticsearch,gingerwizard/elasticsearch,ThiagoGarciaAlves/elasticsearch,fforbeck/elasticsearch,njlawton/elasticsearch,ZTE-PaaS/elasticsearch,Shepard1212/elasticsearch,LeoYao/elasticsearch,fernandozhu/elasticsearch,umeshdangat/elasticsearch,fernandozhu/elasticsearch,fforbeck/elasticsearch,ricardocerq/elasticsearch,StefanGor/elasticsearch,strapdata/elassandra5-rc,sneivandt/elasticsearch,C-Bish/elasticsearch,LewayneNaidoo/elasticsearch,gmarz/elasticsearch,markwalkom/elasticsearch,rajanm/elasticsearch,rlugojr/elasticsearch,njlawton/elasticsearch,C-Bish/elasticsearch,LewayneNaidoo/elasticsearch,mohit/elasticsearch,fernandozhu/elasticsearch,GlenRSmith/elasticsearch,i-am-Nathan/elasticsearch,naveenhooda2000/elasticsearch,jimczi/elasticsearch,bawse/elasticsearch,MaineC/elasticsearch,gmarz/elasticsearch,uschindler/elasticsearch,MisterAndersen/elasticsearch,wangtuo/elasticsearch,shreejay/elasticsearch,zkidkid/elasticsearch,dongjoon-hyun/elasticsearch,henakamaMSFT/elasticsearch,rajanm/elasticsearch,brandonkearby/elasticsearch,GlenRSmith/elasticsearch,strapdata/elassandra5-rc,scorpionvicky/elasticsearch,bawse/elasticsearch,winstonewert/elasticsearch,geidies/elasticsearch,mikemccand/elasticsearch,nazarewk/elasticsearch,StefanGor/elasticsearch,spiegela/elasticsearch,qwerty4030/elasticsearch,spiegela/elasticsearch,ZTE-PaaS/elasticsearch,ZTE-PaaS/elasticsearch,ThiagoGarciaAlves/elasticsearch,sneivandt/elasticsearch,maddin2016/elasticsearch,Helen-Zhao/elasticsearch,MisterAndersen/elasticsearch,awislowski/elasticsearch,rlugojr/elasticsearch,artnowo/elasticsearch,wenpos/elasticsearch,liweinan0423/elasticsearch,MaineC/elasticsearch,girirajsharma/elasticsearch,glefloch/elasticsearch,HonzaKral/elasticsearch,pozhidaevak/elasticsearch,strapdata/elassandra5-rc,IanvsPoplicola/elasticsearch,GlenRSmith/elasticsearch,mikemccand/elasticsearch,elasticdog/elasticsearch,s1monw/elasticsearch,ThiagoGarciaAlves/elasticsearch,maddin2016/elasticsearch,alexshadow007/elasticsearch,vro
yer/elasticassandra,girirajsharma/elasticsearch,umeshdangat/elasticsearch,fforbeck/elasticsearch,wuranbo/elasticsearch,mortonsykes/elasticsearch,lks21c/elasticsearch,mikemccand/elasticsearch,nknize/elasticsearch,nknize/elasticsearch,vroyer/elassandra,wuranbo/elasticsearch,brandonkearby/elasticsearch,artnowo/elasticsearch,masaruh/elasticsearch,awislowski/elasticsearch,i-am-Nathan/elasticsearch,LeoYao/elasticsearch,fforbeck/elasticsearch,coding0011/elasticsearch,liweinan0423/elasticsearch,scottsom/elasticsearch,glefloch/elasticsearch,jimczi/elasticsearch,LewayneNaidoo/elasticsearch,winstonewert/elasticsearch,JackyMai/elasticsearch,gingerwizard/elasticsearch,Helen-Zhao/elasticsearch,MaineC/elasticsearch,naveenhooda2000/elasticsearch,elasticdog/elasticsearch,nilabhsagar/elasticsearch,mortonsykes/elasticsearch,elasticdog/elasticsearch,wuranbo/elasticsearch,rajanm/elasticsearch,ThiagoGarciaAlves/elasticsearch,mikemccand/elasticsearch,winstonewert/elasticsearch,MisterAndersen/elasticsearch,kalimatas/elasticsearch,LewayneNaidoo/elasticsearch,kalimatas/elasticsearch,C-Bish/elasticsearch,girirajsharma/elasticsearch,nknize/elasticsearch,JackyMai/elasticsearch,Shepard1212/elasticsearch,masaruh/elasticsearch,JervyShi/elasticsearch,i-am-Nathan/elasticsearch,geidies/elasticsearch,shreejay/elasticsearch,scorpionvicky/elasticsearch,njlawton/elasticsearch,dongjoon-hyun/elasticsearch,liweinan0423/elasticsearch,JSCooke/elasticsearch,awislowski/elasticsearch,maddin2016/elasticsearch,markwalkom/elasticsearch,naveenhooda2000/elasticsearch,jprante/elasticsearch,s1monw/elasticsearch,artnowo/elasticsearch,fred84/elasticsearch,jimczi/elasticsearch,mortonsykes/elasticsearch,kalimatas/elasticsearch,henakamaMSFT/elasticsearch
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.transport.client; import com.carrotsearch.randomizedtesting.RandomizedTest; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.percolator.PercolatorPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.transport.Netty4Plugin; import org.junit.Test; import java.util.Arrays; import static org.junit.Assert.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; public class PreBuiltTransportClientTests extends RandomizedTest { @Test public void testPluginInstalled() { try (TransportClient client = new PreBuiltTransportClient(Settings.EMPTY)) { Settings settings = client.settings(); assertEquals(Netty4Plugin.NETTY_TRANSPORT_NAME, NetworkModule.HTTP_DEFAULT_TYPE_SETTING.get(settings)); assertEquals(Netty4Plugin.NETTY_TRANSPORT_NAME, NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.get(settings)); } } @Test public void testInstallPluginTwice() { for (Class<? 
extends Plugin> plugin : Arrays.asList(ReindexPlugin.class, PercolatorPlugin.class, MustachePlugin.class)) { try { new PreBuiltTransportClient(Settings.EMPTY, plugin); fail("exception expected"); } catch (IllegalArgumentException ex) { assertTrue("Expected message to start with [plugin already exists: ] but was instead [" + ex.getMessage() + "]", ex.getMessage().startsWith("plugin already exists: ")); } } } }
client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.transport.client; import com.carrotsearch.randomizedtesting.RandomizedTest; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.percolator.PercolatorPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.transport.Netty4Plugin; import org.junit.Test; import java.util.Arrays; import static org.junit.Assert.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; public class PreBuiltTransportClientTests extends RandomizedTest { @Test public void testPluginInstalled() { try (TransportClient client = new PreBuiltTransportClient(Settings.EMPTY)) { Settings settings = client.settings(); assertEquals(Netty4Plugin.NETTY_TRANSPORT_NAME, NetworkModule.HTTP_DEFAULT_TYPE_SETTING.get(settings)); assertEquals(Netty4Plugin.NETTY_TRANSPORT_NAME, NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.get(settings)); } } @Test public void testInstallPluginTwice() { for (Class<? 
extends Plugin> plugin : Arrays.asList(ReindexPlugin.class, PercolatorPlugin.class, MustachePlugin.class)) { try { new PreBuiltTransportClient(Settings.EMPTY, plugin); fail("exception expected"); } catch (IllegalArgumentException ex) { assertTrue(ex.getMessage().startsWith("plugin already exists: ")); } } } }
Added failure message to test
client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java
Added failure message to test
Java
apache-2.0
e15363182c4251780a515cb174ec9b1d81023336
0
projectbuendia/buendia,projectbuendia/buendia,projectbuendia/buendia,projectbuendia/buendia,projectbuendia/buendia
// Copyright 2015 The Project Buendia Authors // // Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy // of the License at: http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distrib- // uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES // OR CONDITIONS OF ANY KIND, either express or implied. See the License for // specific language governing permissions and limitations under the License. package org.projectbuendia.openmrs.web.controller; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Concept; import org.openmrs.Encounter; import org.openmrs.Form; import org.openmrs.FormField; import org.openmrs.Obs; import org.openmrs.Order; import org.openmrs.Patient; import org.openmrs.PatientIdentifier; import org.openmrs.Person; import org.openmrs.api.EncounterService; import org.openmrs.api.ObsService; import org.openmrs.api.OrderService; import org.openmrs.api.PatientService; import org.openmrs.api.context.Context; import org.openmrs.projectbuendia.ClientConceptNamer; import org.openmrs.projectbuendia.Utils; import org.openmrs.projectbuendia.VisitObsValue; import org.openmrs.projectbuendia.webservices.rest.ChartResource; import org.openmrs.projectbuendia.webservices.rest.DbUtil; import org.openmrs.util.FormUtil; import org.openmrs.util.OpenmrsUtil; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import 
java.io.IOException; import java.io.PrintWriter; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; /** The controller for the profile management page. */ @Controller public class PrintCharts { protected static Log log = LogFactory.getLog(ProfileManager.class); private static final Comparator<Patient> PATIENT_COMPARATOR = new Comparator<Patient>() { @Override public int compare(Patient p1, Patient p2) { PatientIdentifier id1 = p1.getPatientIdentifier("MSF"); PatientIdentifier id2 = p2.getPatientIdentifier("MSF"); return Utils.alphanumericComparator.compare( id1 == null ? null : id1.getIdentifier(), id2 == null ? null : id2.getIdentifier() ); } }; private boolean authorized() { return Context.hasPrivilege("Manage Concepts") && Context.hasPrivilege("Manage Forms"); } public static final DateFormat HEADER_DATE_FORMAT = new SimpleDateFormat("d MMM"); private static final DateFormat ORDER_DATE_FORMAT = HEADER_DATE_FORMAT; private static final ClientConceptNamer NAMER = new ClientConceptNamer(Locale.FRENCH); private static final VisitObsValue.ObsValueVisitor<String> STRING_VISITOR = new VisitObsValue.ObsValueVisitor<String>() { @Override public String visitCoded(Concept value) { return NAMER.getClientName(value); } @Override public String visitNumeric(Double value) { return Double.toString(value); } @Override public String visitBoolean(Boolean value) { return Boolean.toString(value); } @Override public String visitText(String value) { return value; } @Override public String visitDate(Date d) { return Utils.YYYYMMDD_UTC_FORMAT.format(d); } @Override public String visitDateTime(Date d) { return Utils.SPREADSHEET_FORMAT.format(d); } }; /** This is 
executed every time a request is made. */ @ModelAttribute public void onStart() {} @RequestMapping(value = "/module/projectbuendia/openmrs/print-charts", method = RequestMethod.GET) public void get(HttpServletRequest request, HttpServletResponse response, ModelMap model) { post(request, response, model); } @RequestMapping(value = "/module/projectbuendia/openmrs/printable", method = RequestMethod.POST) public void post(HttpServletRequest request, HttpServletResponse response, ModelMap model) { model.addAttribute("authorized", authorized()); try { try { generateExport(request, response, model); } catch (NoProfileException e) { response.getWriter().write( "No profile loaded. Please load a profile before exporting data."); } } catch (IOException e) { // OpenMRS prints the stack trace when this happens. WIN. throw new RuntimeException(e); } } private LinkedHashMap<String, List<Concept>> buildChartModel() throws NoProfileException { LinkedHashMap<String, List<Concept>> charts = new LinkedHashMap<>(); String chartName = null; ArrayList<Concept> concepts = null; // Get the first chart. Currently the "first chart" actually contains multiple charts, the // rest of the logic in this method is parsing those. Form form = ChartResource.getCharts(Context.getFormService()).get(0); // Get the structure for that chart. TreeMap<Integer, TreeSet<FormField>> formStructure = FormUtil.getFormStructure(form); TreeSet<FormField> rootNode = formStructure.get(0); for (FormField groupField : rootNode) { if (groupField.getField().getName().equals("[chart_divider]")) { // The first child of the [chart_divider] contains the chart name. chartName = formStructure.get(groupField.getId()).first().getField().getName(); concepts = new ArrayList<>(); // Chart divider has a subfield "notes" (see profile_apply). We work around that // here by skipping when we find a chart divider. // Chart dividers are a hack anyway. 
continue; } for (FormField fieldInGroup : formStructure.get(groupField.getId())) { if (chartName == null) { throw new NoProfileException(); } // TODO: if this is bottleneck, use a TreeSet. Suspect it won't be because it's only // called once / export Concept concept = fieldInGroup.getField().getConcept(); if (!concepts.contains(concept)) { concepts.add(concept); } } charts.put(chartName, concepts); } return charts; } private void generateExport( HttpServletRequest request, HttpServletResponse response, ModelMap model) throws NoProfileException { PatientService patientService = Context.getPatientService(); EncounterService encounterService = Context.getEncounterService(); ObsService obsService = Context.getObsService(); OrderService orderService = Context.getOrderService(); Concept orderExecutedConcept = DbUtil.getOrderExecutedConcept(); List<Patient> patients = new ArrayList<>(patientService.getAllPatients()); Collections.sort(patients, PATIENT_COMPARATOR); LinkedHashMap<String, List<Concept>> charts = buildChartModel(); try { PrintWriter w = response.getWriter(); writeHeader(w); for (Patient patient : patients) { w.write("<h2>" + patient.getPatientIdentifier("MSF") + ". 
" + patient.getGivenName() + " " + patient.getFamilyName() + "</h2><hr/>"); SortedSet<Date> encounterDays = getDatesForEncounterList(encounterService.getEncountersByPatient(patient)); if (encounterDays.size() == 0) { w.write("<b>No encounters for this patient</b>"); continue; } for (Map.Entry<String, List<Concept>> chart : charts.entrySet()) { printPatientChart( obsService, chart.getKey(), chart.getValue(), w, patient, encounterDays); } printOrdersChart(obsService, orderService, orderExecutedConcept, w, patient); } writeFooter(w); } catch (IOException e) { e.printStackTrace(); } } private SortedSet<Date> getDatesForEncounterList(List<Encounter> encounters) { TreeSet<Date> encounterDays = new TreeSet<>(); final Calendar c = Calendar.getInstance(); for (Encounter encounter : encounters) { c.setTime(encounter.getEncounterDatetime()); int year = c.get(Calendar.YEAR); int month = c.get(Calendar.MONTH); int day = c.get(Calendar.DAY_OF_MONTH); //noinspection MagicConstant c.set(year, month, day, 0, 0, 0); encounterDays.add(c.getTime()); } return encounterDays; } private void printOrdersChart(ObsService obsService, OrderService orderService, Concept orderExecutedConcept, PrintWriter w, Patient patient) { Calendar calendar = Calendar.getInstance(); w.write("<h3>TREATMENT</h3>"); List<Order> orders = orderService.getAllOrdersByPatient(patient); if (orders.size() == 0) { w.write("<h3>This patient has no treatments.</h3>"); return; } List<Obs> orderExecutedObs = obsService.getObservations( Collections.<Person>singletonList(patient), null, Collections.singletonList(orderExecutedConcept), null, null, null, null, null, null, null, null, false); Pair<Date, Date> dates = getStartAndEndDateForOrders( orders, orderExecutedObs); Date start = dates.getLeft(); Date stop = dates.getRight(); int day = 1; calendar.setTime(start); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MILLISECOND, 0); Date today = 
calendar.getTime(); do { w.write("<table cellpadding=\"2\" cellspacing=\"0\" border=\"1\" width=\"100%\">\n" + "\t<thead>\n" + "\t\t<th width=\"20%\">&nbsp;</th>\n"); calendar.setTime(today); for (int i = day; i < (day + 7); i++) { w.write("<th width=\"10%\">Day " + i + "<br/>" + HEADER_DATE_FORMAT.format(calendar.getTime()) + "</th>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("\t</thead>\n" + "\t<tbody>\n"); for (Order order : orders) { w.write("<tr><td>"); w.write(order.getInstructions()); w.write(" " + formatStartAndEndDatesForOrder(order)); w.write("</td>"); calendar.setTime(today); for (int i = 1; i < 8; i++) { Date dayStart = calendar.getTime(); Date dayEnd = OpenmrsUtil.getLastMomentOfDay(dayStart); List<Obs> observations = obsService.getObservations(Collections.<Person>singletonList(patient), null, Collections.singletonList(orderExecutedConcept), null, null, null, null, null, null, dayStart, dayEnd, false); String value = "&nbsp;"; if (!observations.isEmpty()) { int numGiven = 0; for (Obs observation : observations) { if (observation.getOrder().equals(order)) { numGiven++; } } if (numGiven > 0) { value = String.valueOf(numGiven); } } w.write("<td>" + value + "</td>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("</tr>"); } w.write("\t</tbody>\n" + "</table>\n"); day += 7; calendar.setTime(today); calendar.add(Calendar.DAY_OF_MONTH, 7); today = calendar.getTime(); } while (today.before(stop) || today.equals(stop)); } private String formatStartAndEndDatesForOrder(Order order) { if (order.getScheduledDate() == null) { // Shouldn't occur, but fail safe. return ""; } String startDateString = ORDER_DATE_FORMAT.format(order.getScheduledDate()); String endDateString = order.getAutoExpireDate() == null ? 
"*" : ORDER_DATE_FORMAT.format(order.getAutoExpireDate()); return String.format("(%s - %s)", startDateString, endDateString); } private Pair<Date, Date> getStartAndEndDateForOrders( List<Order> orders, List<Obs> orderExecutedObs) { Date start = null; Date stop = null; for (Order order : orders) { if (start == null || order.getScheduledDate().before(start)) { start = order.getScheduledDate(); } if (order.getAutoExpireDate() != null) { if (stop == null || order.getAutoExpireDate().after(stop)) { stop = order.getAutoExpireDate(); } } } for (Obs obs : orderExecutedObs) { Date obsTime = obs.getObsDatetime(); if (start == null || obsTime.before(start)) { start = obsTime; } if (stop == null || obsTime.after(stop)) { stop = obsTime; } } // This shouldn't ever occur, but fail gracefully. if (start == null) { start = new Date(); } // If all orders are unlimited orders, this will print into forever. we fix that by ending // printing at the start date. if (stop == null) { stop = start; } return Pair.of(start, stop); } private void printPatientChart(ObsService obsService, String chartName, List<Concept> questionConcepts, PrintWriter w, Patient patient, SortedSet<Date> encounterDays) { w.write("<h3>" + chartName + "</h3>"); int dayCount = 1; Date today = encounterDays.first(); Date lastDay = encounterDays.last(); do { w.write("<table cellpadding=\"2\" cellspacing=\"0\" border=\"1\" width=\"100%\">\n" + "\t<thead>\n" + "\t\t<th width=\"20%\">&nbsp;</th>\n"); Calendar calendar = Calendar.getInstance(); calendar.setTime(today); for (int i = dayCount; i < (dayCount + 7); i++) { w.write("<th width=\"10%\">Day " + i + "<br/>" + HEADER_DATE_FORMAT.format(calendar.getTime()) + "</th>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("\t</thead>\n" + "\t<tbody>\n"); for (Concept concept : questionConcepts) { // Skip concepts that aren't in use. 
int obsCount = obsService.getObservationCount( Collections.<Person>singletonList(patient), null, Collections.singletonList(concept), null, null, null, null, null, null, false); if (obsCount == 0) { continue; } w.write("<tr><td>"); w.write(NAMER.getClientName(concept)); w.write("</td>"); calendar.setTime(today); for (int i = 1; i < 8; i++) { Date dayStart = calendar.getTime(); Date dayEnd = OpenmrsUtil.getLastMomentOfDay(dayStart); // These are sorted by date / time by default. List<Obs> observations = obsService.getObservations( Collections.<Person>singletonList(patient), null, Collections.singletonList(concept), null, null, null, null, null, null, dayStart, dayEnd, false); ArrayList<String> values = new ArrayList<>(); for (Obs obs : observations) { values.add(VisitObsValue.visit(obs, STRING_VISITOR)); } w.write("<td>" + StringUtils.join(values, ", ") + "</td>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("</tr>"); } w.write("\t</tbody>\n" + "</table>\n"); dayCount += 7; calendar.setTime(today); calendar.add(Calendar.DAY_OF_MONTH, 7); today = calendar.getTime(); } while (today.before(lastDay) || today.equals(lastDay)); } private void writeHeader(PrintWriter w) { w.write("<!doctype html>\n" + "<html>\n" + "<head>\n" + " <meta charset=\"utf-8\">\n" + " <title>Patient Charts</title>\n" + " <style type=\"text/css\">\n" + " table { margin-bottom: 22pt; }\n" + " table, tr, thead, tbody {\n" + " page-break-inside: avoid;\n" + " }\n" + " h3 { page-break-after: avoid; }\n" + " h2 { margin 10dp; page-break-before: always;}\n" + " h2:first-child { page-break-before: auto; }\n" + " td { page-break-inside:avoid; }\n" + " thead {background-color:#D5D5D5;}\n" + " body { font-size: 10pt; }\n" // + "tr \n" // + "{ \n" // + " display: table-row-group;\n" // + " page-break-inside:avoid; \n" // + " page-break-after:auto;\n" // + "}\n" //+ "@media print {\n" //+ " thead {display: table-header-group;}\n" //+ "}" + " </style>\n" + "</head>\n" + "<body>"); } private void 
writeFooter(PrintWriter w) { w.write("</body>\n" + "</html>"); } private static class NoProfileException extends Exception { } }
openmrs/omod/src/main/java/org/projectbuendia/openmrs/web/controller/PrintCharts.java
// Copyright 2015 The Project Buendia Authors // // Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy // of the License at: http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distrib- // uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES // OR CONDITIONS OF ANY KIND, either express or implied. See the License for // specific language governing permissions and limitations under the License. package org.projectbuendia.openmrs.web.controller; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Concept; import org.openmrs.Encounter; import org.openmrs.Form; import org.openmrs.FormField; import org.openmrs.Obs; import org.openmrs.Order; import org.openmrs.Patient; import org.openmrs.PatientIdentifier; import org.openmrs.Person; import org.openmrs.api.EncounterService; import org.openmrs.api.ObsService; import org.openmrs.api.OrderService; import org.openmrs.api.PatientService; import org.openmrs.api.context.Context; import org.openmrs.projectbuendia.ClientConceptNamer; import org.openmrs.projectbuendia.Utils; import org.openmrs.projectbuendia.VisitObsValue; import org.openmrs.projectbuendia.webservices.rest.ChartResource; import org.openmrs.projectbuendia.webservices.rest.DbUtil; import org.openmrs.util.FormUtil; import org.openmrs.util.OpenmrsUtil; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import 
java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; /** The controller for the profile management page. */ @Controller public class PrintCharts { protected static Log log = LogFactory.getLog(ProfileManager.class); private static final Comparator<Patient> PATIENT_COMPARATOR = new Comparator<Patient>() { @Override public int compare(Patient p1, Patient p2) { PatientIdentifier id1 = p1.getPatientIdentifier("MSF"); PatientIdentifier id2 = p2.getPatientIdentifier("MSF"); return Utils.alphanumericComparator.compare( id1 == null ? null : id1.getIdentifier(), id2 == null ? null : id2.getIdentifier() ); } }; private boolean authorized() { return Context.hasPrivilege("Manage Concepts") && Context.hasPrivilege("Manage Forms"); } public static final DateFormat HEADER_DATE_FORMAT = new SimpleDateFormat("d MMM"); private static final DateFormat ORDER_DATE_FORMAT = HEADER_DATE_FORMAT; private static final ClientConceptNamer NAMER = new ClientConceptNamer(Locale.FRENCH); private final VisitObsValue.ObsValueVisitor stringVisitor = new VisitObsValue.ObsValueVisitor<String>() { @Override public String visitCoded(Concept value) { return NAMER.getClientName(value); } @Override public String visitNumeric(Double value) { return Double.toString(value); } @Override public String visitBoolean(Boolean value) { return Boolean.toString(value); } @Override public String visitText(String value) { return value; } @Override public String visitDate(Date d) { return Utils.YYYYMMDD_UTC_FORMAT.format(d); } @Override public String visitDateTime(Date d) { return Utils.SPREADSHEET_FORMAT.format(d); } }; /** This is executed every time a request is made. 
*/ @ModelAttribute public void onStart() {} @RequestMapping(value = "/module/projectbuendia/openmrs/print-charts", method = RequestMethod.GET) public void get(HttpServletRequest request, ModelMap model) { model.addAttribute("authorized", authorized()); } @RequestMapping(value = "/module/projectbuendia/openmrs/printable", method = RequestMethod.POST) public void post(HttpServletRequest request, HttpServletResponse response, ModelMap model) { try { try { generateExport(request, response, model); } catch (NoProfileException e) { response.getWriter().write( "No profile loaded. Please load a profile before exporting data."); } } catch (IOException e) { // OpenMRS prints the stack trace when this happens. WIN. throw new RuntimeException(e); } } private LinkedHashMap<String, List<Concept>> buildChartModel() throws NoProfileException { LinkedHashMap<String, List<Concept>> charts = new LinkedHashMap<>(); String chartName = null; ArrayList<Concept> concepts = null; // Get the first chart. Currently the "first chart" actually contains multiple charts, the // rest of the logic in this method is parsing those. Form form = ChartResource.getCharts(Context.getFormService()).get(0); // Get the structure for that chart. TreeMap<Integer, TreeSet<FormField>> formStructure = FormUtil.getFormStructure(form); TreeSet<FormField> rootNode = formStructure.get(0); for (FormField groupField : rootNode) { if (groupField.getField().getName().equals("[chart_divider]")) { // The first child of the [chart_divider] contains the chart name. chartName = formStructure.get(groupField.getId()).first().getField().getName(); concepts = new ArrayList<>(); // Chart divider has a subfield "notes" (see profile_apply). We work around that // here by skipping when we find a chart divider. // Chart dividers are a hack anyway. continue; } for (FormField fieldInGroup : formStructure.get(groupField.getId())) { if (chartName == null) { throw new NoProfileException(); } // TODO: if this is bottleneck, use a TreeSet. 
Suspect it won't be because it's only // called once / export Concept concept = fieldInGroup.getField().getConcept(); if (!concepts.contains(concept)) { concepts.add(concept); } } charts.put(chartName, concepts); } return charts; } private void generateExport( HttpServletRequest request, HttpServletResponse response, ModelMap model) throws NoProfileException { PatientService patientService = Context.getPatientService(); EncounterService encounterService = Context.getEncounterService(); ObsService obsService = Context.getObsService(); OrderService orderService = Context.getOrderService(); Concept orderExecutedConcept = DbUtil.getOrderExecutedConcept(); List<Patient> patients = new ArrayList<>(patientService.getAllPatients()); Collections.sort(patients, PATIENT_COMPARATOR); LinkedHashMap<String, List<Concept>> charts = buildChartModel(); try { PrintWriter w = response.getWriter(); writeHeader(w); for (Patient patient : patients) { w.write("<h2>" + patient.getPatientIdentifier("MSF") + ". " + patient.getGivenName() + " " + patient.getFamilyName() + "</h2><hr/>"); SortedSet<Date> encounterDays = getDatesForEncounterList(encounterService.getEncountersByPatient(patient)); if (encounterDays.size() == 0) { w.write("<b>No encounters for this patient</b>"); continue; } for (Map.Entry<String, List<Concept>> chart : charts.entrySet()) { printPatientChart( obsService, chart.getKey(), chart.getValue(), w, patient, encounterDays); } printOrdersChart(obsService, orderService, orderExecutedConcept, w, patient); } writeFooter(w); } catch (IOException e) { e.printStackTrace(); } } private SortedSet<Date> getDatesForEncounterList(List<Encounter> encounters) { TreeSet<Date> encounterDays = new TreeSet<>(); final Calendar c = Calendar.getInstance(); for (Encounter encounter : encounters) { c.setTime(encounter.getEncounterDatetime()); int year = c.get(Calendar.YEAR); int month = c.get(Calendar.MONTH); int day = c.get(Calendar.DAY_OF_MONTH); //noinspection MagicConstant c.set(year, month, 
day, 0, 0, 0); encounterDays.add(c.getTime()); } return encounterDays; } private void printOrdersChart(ObsService obsService, OrderService orderService, Concept orderExecutedConcept, PrintWriter w, Patient patient) { Calendar calendar = Calendar.getInstance(); w.write("<h3>TREATMENT</h3>"); List<Order> orders = orderService.getAllOrdersByPatient(patient); if (orders.size() == 0) { w.write("<h3>This patient has no treatments.</h3>"); return; } List<Obs> orderExecutedObs = obsService.getObservations( Collections.<Person>singletonList(patient), null, Collections.singletonList(orderExecutedConcept), null, null, null, null, null, null, null, null, false); Pair<Date, Date> dates = getStartAndEndDateForOrders( orders, orderExecutedObs); Date start = dates.getLeft(); Date stop = dates.getRight(); int day = 1; calendar.setTime(start); calendar.set(Calendar.HOUR_OF_DAY, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MILLISECOND, 0); Date today = calendar.getTime(); do { w.write("<table cellpadding=\"2\" cellspacing=\"0\" border=\"1\" width=\"100%\">\n" + "\t<thead>\n" + "\t\t<th width=\"20%\">&nbsp;</th>\n"); calendar.setTime(today); for (int i = day; i < (day + 7); i++) { w.write("<th width=\"10%\">Day " + i + "<br/>" + HEADER_DATE_FORMAT.format(calendar.getTime()) + "</th>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("\t</thead>\n" + "\t<tbody>\n"); for (Order order : orders) { w.write("<tr><td>"); w.write(order.getInstructions()); w.write(" " + formatStartAndEndDatesForOrder(order)); w.write("</td>"); calendar.setTime(today); for (int i = 1; i < 8; i++) { Date dayStart = calendar.getTime(); Date dayEnd = OpenmrsUtil.getLastMomentOfDay(dayStart); List<Obs> observations = obsService.getObservations(Collections.<Person>singletonList(patient), null, Collections.singletonList(orderExecutedConcept), null, null, null, null, null, null, dayStart, dayEnd, false); String value = "&nbsp;"; if (!observations.isEmpty()) { int 
numGiven = 0; for (Obs observation : observations) { if (observation.getOrder().equals(order)) { numGiven++; } } if (numGiven > 0) { value = String.valueOf(numGiven); } } w.write("<td>" + value + "</td>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("</tr>"); } w.write("\t</tbody>\n" + "</table>\n"); day += 7; calendar.setTime(today); calendar.add(Calendar.DAY_OF_MONTH, 7); today = calendar.getTime(); } while (today.before(stop) || today.equals(stop)); } private String formatStartAndEndDatesForOrder(Order order) { if (order.getScheduledDate() == null) { // Shouldn't occur, but fail safe. return ""; } String startDateString = ORDER_DATE_FORMAT.format(order.getScheduledDate()); String endDateString = order.getAutoExpireDate() == null ? "*" : ORDER_DATE_FORMAT.format(order.getAutoExpireDate()); return String.format("(%s - %s)", startDateString, endDateString); } private Pair<Date, Date> getStartAndEndDateForOrders( List<Order> orders, List<Obs> orderExecutedObs) { Date start = null; Date stop = null; for (Order order : orders) { if (start == null || order.getScheduledDate().before(start)) { start = order.getScheduledDate(); } if (order.getAutoExpireDate() != null) { if (stop == null || order.getAutoExpireDate().after(stop)) { stop = order.getAutoExpireDate(); } } } for (Obs obs : orderExecutedObs) { Date obsTime = obs.getObsDatetime(); if (start == null || obsTime.before(start)) { start = obsTime; } if (stop == null || obsTime.after(stop)) { stop = obsTime; } } // This shouldn't ever occur, but fail gracefully. if (start == null) { start = new Date(); } // If all orders are unlimited orders, this will print into forever. we fix that by ending // printing at the start date. 
if (stop == null) { stop = start; } return Pair.of(start, stop); } private void printPatientChart(ObsService obsService, String chartName, List<Concept> questionConcepts, PrintWriter w, Patient patient, SortedSet<Date> encounterDays) { w.write("<h3>" + chartName + "</h3>"); int dayCount = 1; Date today = encounterDays.first(); Date lastDay = encounterDays.last(); do { w.write("<table cellpadding=\"2\" cellspacing=\"0\" border=\"1\" width=\"100%\">\n" + "\t<thead>\n" + "\t\t<th width=\"20%\">&nbsp;</th>\n"); Calendar calendar = Calendar.getInstance(); calendar.setTime(today); for (int i = dayCount; i < (dayCount + 7); i++) { w.write("<th width=\"10%\">Day " + i + "<br/>" + HEADER_DATE_FORMAT.format(calendar.getTime()) + "</th>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("\t</thead>\n" + "\t<tbody>\n"); for (Concept concept : questionConcepts) { // Skip concepts that aren't in use. int obsCount = obsService.getObservationCount( Collections.<Person>singletonList(patient), null, Collections.singletonList(concept), null, null, null, null, null, null, false); if (obsCount == 0) { continue; } w.write("<tr><td>"); w.write(NAMER.getClientName(concept)); w.write("</td>"); calendar.setTime(today); for (int i = 1; i < 8; i++) { Date dayStart = calendar.getTime(); Date dayEnd = OpenmrsUtil.getLastMomentOfDay(dayStart); List<Obs> observations = obsService.getObservations( Collections.<Person>singletonList(patient), null, Collections.singletonList(concept), null, null, null, null, 1, null, dayStart, dayEnd, false); String value = "&nbsp;"; if (!observations.isEmpty()) { // TODO: multiple observations in this cell. Maybe with times? IDK. 
value = (String) VisitObsValue.visit(observations.get(0), stringVisitor); } w.write("<td>" + value + "</td>"); calendar.add(Calendar.DAY_OF_MONTH, 1); } w.write("</tr>"); } w.write("\t</tbody>\n" + "</table>\n"); dayCount += 7; calendar.setTime(today); calendar.add(Calendar.DAY_OF_MONTH, 7); today = calendar.getTime(); } while (today.before(lastDay) || today.equals(lastDay)); } private void writeHeader(PrintWriter w) { w.write("<!doctype html>\n" + "<html>\n" + "<head>\n" + " <meta charset=\"utf-8\">\n" + " <title>Patient Charts</title>\n" + " <style type=\"text/css\">\n" + " table { page-break-inside:auto; margin-bottom: 22pt; }\n" //+ " tr { page-break-inside:avoid; page-break-after:auto }\n" //+ " thead { display:table-header-group }\n" //+ " tfoot { display:table-footer-group }\n" + " h2 { margin 10dp; page-break-before: always;}\n" + " td { page-break-inside:avoid; }\n" + " thead {background-color:#D5D5D5;}\n" // + "tr \n" // + "{ \n" // + " display: table-row-group;\n" // + " page-break-inside:avoid; \n" // + " page-break-after:auto;\n" // + "}\n" //+ "@media print {\n" //+ " thead {display: table-header-group;}\n" //+ "}" + " </style>\n" + "</head>\n" + "<body>"); } private void writeFooter(PrintWriter w) { w.write("</body>\n" + "</html>"); } private static class NoProfileException extends Exception { } }
More printable charts tweaks: - Reduced font size to 10pt. We could probably even go down to 9pt or 8pt based on current charts. - Supports HTTP GET operations now. - Tables don't break if that can be avoided. This has the implication that the header often ends up by itself on another page - I'm not sure if this is better or worse. - Multiple values for the same day are concatenated together, in order, but without timestamps.
openmrs/omod/src/main/java/org/projectbuendia/openmrs/web/controller/PrintCharts.java
More printable charts tweaks:
Java
apache-2.0
a4a1960776a9f7e7765448ab38a42aef054f327e
0
rozza/mongo-java-driver,rozza/mongo-java-driver,jyemin/mongo-java-driver,jyemin/mongo-java-driver
/*
 * Copyright 2008-present MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.mongodb.internal.connection;

import com.mongodb.MongoClientException;
import com.mongodb.MongoConfigurationException;
import com.mongodb.MongoException;
import com.mongodb.MongoTimeoutException;
import com.mongodb.ServerAddress;
import com.mongodb.async.FutureResultCallback;
import com.mongodb.connection.ClusterConnectionMode;
import com.mongodb.connection.ClusterId;
import com.mongodb.connection.ClusterSettings;
import com.mongodb.connection.ServerConnectionState;
import com.mongodb.connection.ServerDescription;
import com.mongodb.connection.ServerSettings;
import com.mongodb.connection.ServerType;
import com.mongodb.selector.ServerSelector;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.RepeatedTest;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;

import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code LoadBalancedCluster}: server selection with and without
 * SRV lookup, SRV misconfiguration and timeout handling, shutdown behavior, and
 * concurrent (sync and async) selection while SRV resolution is in flight.
 */
public class LoadBalancedClusterTest {
    private LoadBalancedCluster cluster;

    /**
     * Closes the cluster created by the test that just ran.
     * FIX: this cleanup method was annotated {@code @BeforeEach}, which closed the
     * previous test's cluster lazily and left the final test's cluster (and its
     * background resources) open forever; {@code @AfterEach} matches the method's
     * name and intent and closes each test's cluster promptly.
     */
    @AfterEach
    public void after() {
        if (cluster != null) {
            cluster.close();
        }
    }

    @Test
    public void shouldSelectServerWhenThereIsNoSRVLookup() {
        // given
        ServerAddress serverAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .hosts(Collections.singletonList(serverAddress))
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(serverAddress, expectedServer);

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, mock(DnsSrvRecordMonitorFactory.class));

        // when
        ServerTuple serverTuple = cluster.selectServer(mock(ServerSelector.class));

        // then
        assertServerTupleExpectations(serverAddress, expectedServer, serverTuple);

        // when
        FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>();
        cluster.selectServerAsync(mock(ServerSelector.class), callback);
        serverTuple = callback.get();

        // then
        assertServerTupleExpectations(serverAddress, expectedServer, serverTuple);
    }

    @Test
    public void shouldSelectServerWhenThereIsSRVLookup() {
        // given
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        // when
        ServerTuple serverTuple = cluster.selectServer(mock(ServerSelector.class));

        // then
        assertServerTupleExpectations(resolvedServerAddress, expectedServer, serverTuple);
    }

    @Test
    public void shouldSelectServerAsynchronouslyWhenThereIsSRVLookup() {
        // given
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        // when
        FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>();
        cluster.selectServerAsync(mock(ServerSelector.class), callback);
        ServerTuple serverTuple = callback.get();

        // then
        assertServerTupleExpectations(resolvedServerAddress, expectedServer, serverTuple);
    }

    @Test
    public void shouldFailSelectServerWhenThereIsSRVMisconfiguration() {
        // given
        String srvHostName = "foo.bar.com";
        ClusterSettings clusterSettings = ClusterSettings.builder()
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory();
        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        // SRV resolution yields two hosts, which is invalid in load-balanced mode.
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2))
                        .hosts(Arrays.asList(new ServerAddress("host1"), new ServerAddress("host2"))));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        MongoClientException exception = assertThrows(MongoClientException.class, () -> cluster.selectServer(mock(ServerSelector.class)));
        assertEquals("In load balancing mode, the host must resolve to a single SRV record, but instead it resolved to multiple hosts",
                exception.getMessage());
    }

    @Test
    public void shouldFailSelectServerAsynchronouslyWhenThereIsSRVMisconfiguration() {
        // given
        String srvHostName = "foo.bar.com";
        ClusterSettings clusterSettings = ClusterSettings.builder()
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory();
        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        // SRV resolution yields two hosts, which is invalid in load-balanced mode.
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2))
                        .hosts(Arrays.asList(new ServerAddress("host1"), new ServerAddress("host2"))));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>();
        cluster.selectServerAsync(mock(ServerSelector.class), callback);
        MongoClientException exception = assertThrows(MongoClientException.class, callback::get);
        assertEquals("In load balancing mode, the host must resolve to a single SRV record, but instead it resolved to multiple hosts",
                exception.getMessage());
    }

    @Test
    public void shouldTimeoutSelectServerWhenThereIsSRVLookup() {
        // given
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .serverSelectionTimeout(5, MILLISECONDS)
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        // SRV resolution takes far longer than the 5 ms selection timeout.
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)).sleepTime(Duration.ofHours(1)));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, () -> cluster.selectServer(mock(ServerSelector.class)));
        assertEquals("Timed out after 5 ms while waiting to resolve SRV records for foo.bar.com.", exception.getMessage());
    }

    @Test
    public void shouldTimeoutSelectServerWhenThereIsSRVLookupException() {
        // given
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .serverSelectionTimeout(10, MILLISECONDS)
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        // SRV resolution fails quickly; the selection should time out and report the cause.
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2))
                        .sleepTime(Duration.ofMillis(1))
                        .exception(new MongoConfigurationException("Unable to resolve SRV record")));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, () -> cluster.selectServer(mock(ServerSelector.class)));
        assertEquals("Timed out after 10 ms while waiting to resolve SRV records for foo.bar.com. "
                        + "Resolution exception was 'com.mongodb.MongoConfigurationException: Unable to resolve SRV record'",
                exception.getMessage());
    }

    @Test
    public void shouldTimeoutSelectServerAsynchronouslyWhenThereIsSRVLookup() {
        // given
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .serverSelectionTimeout(5, MILLISECONDS)
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        // SRV resolution takes far longer than the 5 ms selection timeout.
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)).sleepTime(Duration.ofHours(1)));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>();
        cluster.selectServerAsync(mock(ServerSelector.class), callback);
        MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, callback::get);
        assertEquals("Timed out after 5 ms while waiting to resolve SRV records for foo.bar.com.", exception.getMessage());
    }

    @Test
    public void shouldTimeoutSelectServerAsynchronouslyWhenThereIsSRVLookupException() {
        // given
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .serverSelectionTimeout(10, MILLISECONDS)
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        // SRV resolution fails quickly; the selection should time out and report the cause.
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2))
                        .sleepTime(Duration.ofMillis(1))
                        .exception(new MongoConfigurationException("Unable to resolve SRV record")));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>();
        cluster.selectServerAsync(mock(ServerSelector.class), callback);
        MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, callback::get);
        assertEquals("Timed out after 10 ms while waiting to resolve SRV records for foo.bar.com. "
                        + "Resolution exception was 'com.mongodb.MongoConfigurationException: Unable to resolve SRV record'",
                exception.getMessage());
    }

    @Test
    void shouldNotInitServerAfterClosing() {
        // prepare mocks
        ClusterSettings clusterSettings = ClusterSettings.builder().mode(ClusterConnectionMode.LOAD_BALANCED).srvHost("foo.bar.com").build();
        ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class);
        when(serverFactory.getSettings()).thenReturn(mock(ServerSettings.class));
        DnsSrvRecordMonitorFactory srvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        when(srvRecordMonitorFactory.create(any(), eq(clusterSettings.getSrvServiceName()), any(DnsSrvRecordInitializer.class)))
                .thenReturn(mock(DnsSrvRecordMonitor.class));
        ArgumentCaptor<DnsSrvRecordInitializer> serverInitializerCaptor = ArgumentCaptor.forClass(DnsSrvRecordInitializer.class);
        // create `cluster` and capture its `DnsSrvRecordInitializer` (server initializer)
        LoadBalancedCluster cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, srvRecordMonitorFactory);
        verify(srvRecordMonitorFactory, times(1)).create(any(), eq(clusterSettings.getSrvServiceName()), serverInitializerCaptor.capture());
        // close `cluster`, call `DnsSrvRecordInitializer.initialize` and check that it does not result in creating a `ClusterableServer`
        cluster.close();
        serverInitializerCaptor.getValue().initialize(Collections.singleton(new ServerAddress()));
        verify(serverFactory, never()).create(any(), any(), any(), any());
    }

    @Test
    void shouldCloseServerWhenClosing() {
        // prepare mocks
        ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class);
        when(serverFactory.getSettings()).thenReturn(mock(ServerSettings.class));
        ClusterableServer server = mock(ClusterableServer.class);
        when(serverFactory.create(any(), any(), any(), any())).thenReturn(server);
        // create `cluster` and check that it creates a `ClusterableServer`
        LoadBalancedCluster cluster = new LoadBalancedCluster(new ClusterId(),
                ClusterSettings.builder().mode(ClusterConnectionMode.LOAD_BALANCED).build(),
                serverFactory, mock(DnsSrvRecordMonitorFactory.class));
        verify(serverFactory, times(1)).create(any(), any(), any(), any());
        // close `cluster` and check that it closes `server`
        cluster.close();
        verify(server, atLeastOnce()).close();
    }

    @RepeatedTest(value = 10, name = RepeatedTest.LONG_DISPLAY_NAME)
    @Tag("Slow")
    public void synchronousConcurrentTest() throws InterruptedException, ExecutionException, TimeoutException {
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .serverSelectionTimeout(5, MILLISECONDS)
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        // SRV resolution completes after 5 s; early selections are expected to time out.
        Duration srvResolutionTime = Duration.ofSeconds(5);
        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)).sleepTime(srvResolutionTime));

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        int numThreads = 100;
        ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
        List<Future<?>> futures = new ArrayList<>(numThreads);
        for (int i = 0; i < numThreads; i++) {
            futures.add(executorService.submit(() -> {
                boolean success = false;
                while (!success) {
                    try {
                        cluster.selectServer(mock(ServerSelector.class));
                        success = true;
                    } catch (MongoTimeoutException e) {
                        // this is expected
                    }
                }
                // Keep going for a little while
                for (int j = 0; j < 100; j++) {
                    cluster.selectServer(mock(ServerSelector.class));
                }
            }));
        }

        for (Future<?> future : futures) {
            future.get(10, SECONDS);
        }
        executorService.shutdownNow();
    }

    @RepeatedTest(value = 10, name = RepeatedTest.LONG_DISPLAY_NAME)
    @Tag("Slow")
    public void asynchronousConcurrentTest() throws InterruptedException, ExecutionException, TimeoutException {
        String srvHostName = "foo.bar.com";
        ServerAddress resolvedServerAddress = new ServerAddress("host1");
        ClusterableServer expectedServer = mock(ClusterableServer.class);

        ClusterSettings clusterSettings = ClusterSettings.builder()
                .serverSelectionTimeout(5, MILLISECONDS)
                .mode(ClusterConnectionMode.LOAD_BALANCED)
                .srvHost(srvHostName)
                .build();

        ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer);

        // SRV resolution completes after 5 s; callbacks issued before then may fail.
        Duration srvResolutionTime = Duration.ofSeconds(5);
        DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class);
        AtomicReference<TestDnsSrvRecordMonitor> dnsSrvRecordMonitorReference = new AtomicReference<>();
        when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer(
                invocation -> {
                    TestDnsSrvRecordMonitor dnsSrvRecordMonitor = new TestDnsSrvRecordMonitor(invocation.getArgument(2))
                            .sleepTime(srvResolutionTime);
                    dnsSrvRecordMonitorReference.set(dnsSrvRecordMonitor);
                    return dnsSrvRecordMonitor;
                });

        cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory);

        int numThreads = 10;
        List<List<FutureResultCallback<ServerTuple>>> callbacksList = new ArrayList<>(numThreads);
        ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
        List<Future<?>> futures = new ArrayList<>(numThreads);
        for (int i = 0; i < numThreads; i++) {
            List<FutureResultCallback<ServerTuple>> callbacks = new ArrayList<>();
            callbacksList.add(callbacks);
            futures.add(executorService.submit(() -> {
                while (!dnsSrvRecordMonitorReference.get().isInitialized()) {
                    FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>();
                    callbacks.add(callback);
                    cluster.selectServerAsync(mock(ServerSelector.class), callback);
                }
                // Keep going for a little while
                for (int j = 0; j < 100; j++) {
                    FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>();
                    callbacks.add(callback);
                    cluster.selectServerAsync(mock(ServerSelector.class), callback);
                }
            }));
        }

        for (Future<?> future : futures) {
            future.get(10, SECONDS);
        }
        executorService.shutdownNow();

        // Once a selection succeeds, every later selection on that thread must also succeed.
        for (List<FutureResultCallback<ServerTuple>> callbacks : callbacksList) {
            boolean foundFirstNonExceptionResult = false;
            for (FutureResultCallback<ServerTuple> curCallback : callbacks) {
                assertFalse(curCallback.wasInvokedMultipleTimes());
                assertTrue(curCallback.isDone());
                if (!curCallback.isCompletedExceptionally()) {
                    foundFirstNonExceptionResult = true;
                }
                if (foundFirstNonExceptionResult) {
                    assertFalse(curCallback.isCompletedExceptionally());
                }
            }
        }
    }

    private void assertServerTupleExpectations(final ServerAddress serverAddress, final ClusterableServer expectedServer,
                                               final ServerTuple serverTuple) {
        assertEquals(expectedServer, serverTuple.getServer());
        // Can't just use assertEquals here because the equals method compares lastUpdateTimeNanos property, which won't ever be the same
        ServerDescription serverDescription = serverTuple.getServerDescription();
        assertTrue(serverDescription.isOk());
        assertEquals(ServerConnectionState.CONNECTED, serverDescription.getState());
        assertEquals(serverAddress, serverDescription.getAddress());
        assertEquals(ServerType.LOAD_BALANCER, serverDescription.getType());
    }

    /** Builds a factory whose {@code create} returns {@code expectedServer} for {@code serverAddress}. */
    @NotNull
    private ClusterableServerFactory mockServerFactory(final ServerAddress serverAddress, final ClusterableServer expectedServer) {
        ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class);
        when(serverFactory.getSettings()).thenReturn(ServerSettings.builder().build());
        when(serverFactory.create(eq(serverAddress), any(), any(), any())).thenReturn(expectedServer);
        return serverFactory;
    }

    /** Builds a factory with default settings and no stubbed {@code create} behavior. */
    @NotNull
    private ClusterableServerFactory mockServerFactory() {
        ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class);
        when(serverFactory.getSettings()).thenReturn(ServerSettings.builder().build());
        return serverFactory;
    }

    /**
     * Test double for {@code DnsSrvRecordMonitor}: on {@link #start()} it spawns a
     * thread that sleeps for a configurable time, then delivers either the configured
     * hosts or the configured exception to the initializer.
     */
    private static class TestDnsSrvRecordMonitor implements DnsSrvRecordMonitor {
        private final DnsSrvRecordInitializer initializer;
        private Duration sleepTime;               // simulated SRV resolution latency
        private Thread thread;
        private Collection<ServerAddress> hosts;  // addresses delivered on success
        private MongoException exception;         // if non-null, delivered instead of hosts
        private volatile boolean initialized;

        TestDnsSrvRecordMonitor(final DnsSrvRecordInitializer initializer) {
            this.initializer = initializer;
            sleepTime = Duration.ofMillis(50);
            hosts = Collections.singletonList(new ServerAddress("host1"));
        }

        TestDnsSrvRecordMonitor sleepTime(final Duration sleepTime) {
            this.sleepTime = sleepTime;
            return this;
        }

        TestDnsSrvRecordMonitor hosts(final Collection<ServerAddress> hosts) {
            this.hosts = hosts;
            return this;
        }

        public TestDnsSrvRecordMonitor exception(final MongoException exception) {
            this.exception = exception;
            return this;
        }

        public boolean isInitialized() {
            return initialized;
        }

        @Override
        public void start() {
            thread = new Thread(() -> {
                try {
                    Thread.sleep(sleepTime.toMillis());
                    if (exception != null) {
                        initializer.initialize(exception);
                    } else {
                        initializer.initialize(hosts);
                    }
                    initialized = true;
                } catch (InterruptedException e) {
                    // ignore
                }
            });
            thread.start();
        }

        @Override
        public void close() {
            if (thread != null) {
                thread.interrupt();
            }
        }
    }
}
driver-core/src/test/unit/com/mongodb/internal/connection/LoadBalancedClusterTest.java
/* * Copyright 2008-present MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb.internal.connection; import com.mongodb.MongoClientException; import com.mongodb.MongoConfigurationException; import com.mongodb.MongoException; import com.mongodb.MongoTimeoutException; import com.mongodb.ServerAddress; import com.mongodb.async.FutureResultCallback; import com.mongodb.connection.ClusterConnectionMode; import com.mongodb.connection.ClusterId; import com.mongodb.connection.ClusterSettings; import com.mongodb.connection.ServerConnectionState; import com.mongodb.connection.ServerDescription; import com.mongodb.connection.ServerSettings; import com.mongodb.connection.ServerType; import com.mongodb.selector.ServerSelector; import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.RepeatedTest; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static 
java.util.concurrent.TimeUnit.SECONDS; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class LoadBalancedClusterTest { private LoadBalancedCluster cluster; @BeforeEach public void after() { if (cluster != null) { cluster.close(); } } @Test public void shouldSelectServerWhenThereIsNoSRVLookup() { // given ServerAddress serverAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .mode(ClusterConnectionMode.LOAD_BALANCED) .hosts(Collections.singletonList(serverAddress)) .build(); ClusterableServerFactory serverFactory = mockServerFactory(serverAddress, expectedServer); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, mock(DnsSrvRecordMonitorFactory.class)); // when ServerTuple serverTuple = cluster.selectServer(mock(ServerSelector.class)); // then assertServerTupleExpectations(serverAddress, expectedServer, serverTuple); // when FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>(); cluster.selectServerAsync(mock(ServerSelector.class), callback); serverTuple = callback.get(); // then assertServerTupleExpectations(serverAddress, expectedServer, serverTuple); } @Test public void shouldSelectServerWhenThereIsSRVLookup() { // given String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = 
mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); // when ServerTuple serverTuple = cluster.selectServer(mock(ServerSelector.class)); // then assertServerTupleExpectations(resolvedServerAddress, expectedServer, serverTuple); } @Test public void shouldSelectServerAsynchronouslyWhenThereIsSRVLookup() { // given String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); // when FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>(); cluster.selectServerAsync(mock(ServerSelector.class), callback); ServerTuple serverTuple = callback.get(); // then assertServerTupleExpectations(resolvedServerAddress, expectedServer, 
serverTuple); } @Test public void shouldFailSelectServerWhenThereIsSRVMisconfiguration() { // given String srvHostName = "foo.bar.com"; ClusterSettings clusterSettings = ClusterSettings.builder() .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)) .hosts(Arrays.asList(new ServerAddress("host1"), new ServerAddress("host2")))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); MongoClientException exception = assertThrows(MongoClientException.class, () -> cluster.selectServer(mock(ServerSelector.class))); assertEquals("In load balancing mode, the host must resolve to a single SRV record, but instead it resolved to multiple hosts", exception.getMessage()); } @Test public void shouldFailSelectServerAsynchronouslyWhenThereIsSRVMisconfiguration() { // given String srvHostName = "foo.bar.com"; ClusterSettings clusterSettings = ClusterSettings.builder() .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)) .hosts(Arrays.asList(new ServerAddress("host1"), new ServerAddress("host2")))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>(); 
cluster.selectServerAsync(mock(ServerSelector.class), callback); MongoClientException exception = assertThrows(MongoClientException.class, callback::get); assertEquals("In load balancing mode, the host must resolve to a single SRV record, but instead it resolved to multiple hosts", exception.getMessage()); } @Test public void shouldTimeoutSelectServerWhenThereIsSRVLookup() { // given String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .serverSelectionTimeout(5, MILLISECONDS) .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)).sleepTime(Duration.ofHours(1))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, () -> cluster.selectServer(mock(ServerSelector.class))); assertEquals("Timed out after 5 ms while waiting to resolve SRV records for foo.bar.com.", exception.getMessage()); } @Test public void shouldTimeoutSelectServerWhenThereIsSRVLookupException() { // given String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .serverSelectionTimeout(10, MILLISECONDS) .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = 
mockServerFactory(resolvedServerAddress, expectedServer); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)) .sleepTime(Duration.ofMillis(1)) .exception(new MongoConfigurationException("Unable to resolve SRV record"))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, () -> cluster.selectServer(mock(ServerSelector.class))); assertEquals("Timed out after 10 ms while waiting to resolve SRV records for foo.bar.com. " + "Resolution exception was 'com.mongodb.MongoConfigurationException: Unable to resolve SRV record'", exception.getMessage()); } @Test public void shouldTimeoutSelectServerAsynchronouslyWhenThereIsSRVLookup() { // given String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings .builder() .serverSelectionTimeout(5, MILLISECONDS) .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)).sleepTime(Duration.ofHours(1))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>(); 
cluster.selectServerAsync(mock(ServerSelector.class), callback); MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, callback::get); assertEquals("Timed out after 5 ms while waiting to resolve SRV records for foo.bar.com.", exception.getMessage()); } @Test public void shouldTimeoutSelectServerAsynchronouslyWhenThereIsSRVLookupException() { // given String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .serverSelectionTimeout(10, MILLISECONDS) .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(2)) .sleepTime(Duration.ofMillis(1)) .exception(new MongoConfigurationException("Unable to resolve SRV record"))); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>(); cluster.selectServerAsync(mock(ServerSelector.class), callback); MongoTimeoutException exception = assertThrows(MongoTimeoutException.class, callback::get); assertEquals("Timed out after 10 ms while waiting to resolve SRV records for foo.bar.com. 
" + "Resolution exception was 'com.mongodb.MongoConfigurationException: Unable to resolve SRV record'", exception.getMessage()); } @Test void shouldNotInitServerAfterClosing() { // prepare mocks ClusterSettings clusterSettings = ClusterSettings.builder().mode(ClusterConnectionMode.LOAD_BALANCED).srvHost("foo.bar.com").build(); ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class); when(serverFactory.getSettings()).thenReturn(mock(ServerSettings.class)); DnsSrvRecordMonitorFactory srvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(srvRecordMonitorFactory.create(any(), eq(clusterSettings.getSrvServiceName()), any(DnsSrvRecordInitializer.class))).thenReturn(mock(DnsSrvRecordMonitor.class)); ArgumentCaptor<DnsSrvRecordInitializer> serverInitializerCaptor = ArgumentCaptor.forClass(DnsSrvRecordInitializer.class); // create `cluster` and capture its `DnsSrvRecordInitializer` (server initializer) LoadBalancedCluster cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, srvRecordMonitorFactory); verify(srvRecordMonitorFactory, times(1)).create(any(), eq(clusterSettings.getSrvServiceName()), serverInitializerCaptor.capture()); // close `cluster`, call `DnsSrvRecordInitializer.initialize` and check that it does not result in creating a `ClusterableServer` cluster.close(); serverInitializerCaptor.getValue().initialize(Collections.singleton(new ServerAddress())); verify(serverFactory, never()).create(any(), any(), any(), any()); } @Test void shouldCloseServerWhenClosing() { // prepare mocks ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class); when(serverFactory.getSettings()).thenReturn(mock(ServerSettings.class)); ClusterableServer server = mock(ClusterableServer.class); when(serverFactory.create(any(), any(), any(), any())).thenReturn(server); // create `cluster` and check that it creates a `ClusterableServer` LoadBalancedCluster cluster = new LoadBalancedCluster(new 
ClusterId(), ClusterSettings.builder().mode(ClusterConnectionMode.LOAD_BALANCED).build(), serverFactory, mock(DnsSrvRecordMonitorFactory.class)); verify(serverFactory, times(1)).create(any(), any(), any(), any()); // close `cluster` and check that it closes `server` cluster.close(); verify(server, atLeastOnce()).close(); } @RepeatedTest(value = 10, name = RepeatedTest.LONG_DISPLAY_NAME) @Tag("Slow") public void synchronousConcurrentTest() throws InterruptedException, ExecutionException, TimeoutException { String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .serverSelectionTimeout(5, MILLISECONDS) .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer); Duration srvResolutionTime = Duration.ofSeconds(5); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> new TestDnsSrvRecordMonitor(invocation.getArgument(1)).sleepTime(srvResolutionTime)); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); int numThreads = 100; ExecutorService executorService = Executors.newFixedThreadPool(numThreads); List<Future<?>> futures = new ArrayList<>(numThreads); for (int i = 0; i < numThreads; i++) { futures.add(executorService.submit(() -> { boolean success = false; while (!success) { try { cluster.selectServer(mock(ServerSelector.class)); success = true; } catch (MongoTimeoutException e) { // this is expected } } // Keep going for a little while for (int j = 0; j < 100; j++) { cluster.selectServer(mock(ServerSelector.class)); } })); } for (Future<?> future : futures) 
{ future.get(10, SECONDS); } executorService.shutdownNow(); } @RepeatedTest(value = 10, name = RepeatedTest.LONG_DISPLAY_NAME) @Tag("Slow") public void asynchronousConcurrentTest() throws InterruptedException, ExecutionException, TimeoutException { String srvHostName = "foo.bar.com"; ServerAddress resolvedServerAddress = new ServerAddress("host1"); ClusterableServer expectedServer = mock(ClusterableServer.class); ClusterSettings clusterSettings = ClusterSettings.builder() .serverSelectionTimeout(5, MILLISECONDS) .mode(ClusterConnectionMode.LOAD_BALANCED) .srvHost(srvHostName) .build(); ClusterableServerFactory serverFactory = mockServerFactory(resolvedServerAddress, expectedServer); Duration srvResolutionTime = Duration.ofSeconds(5); DnsSrvRecordMonitorFactory dnsSrvRecordMonitorFactory = mock(DnsSrvRecordMonitorFactory.class); AtomicReference<TestDnsSrvRecordMonitor> dnsSrvRecordMonitorReference = new AtomicReference<>(); when(dnsSrvRecordMonitorFactory.create(eq(srvHostName), eq(clusterSettings.getSrvServiceName()), any())).thenAnswer( invocation -> { TestDnsSrvRecordMonitor dnsSrvRecordMonitor = new TestDnsSrvRecordMonitor(invocation.getArgument(1)) .sleepTime(srvResolutionTime); dnsSrvRecordMonitorReference.set(dnsSrvRecordMonitor); return dnsSrvRecordMonitor; }); cluster = new LoadBalancedCluster(new ClusterId(), clusterSettings, serverFactory, dnsSrvRecordMonitorFactory); int numThreads = 10; List<List<FutureResultCallback<ServerTuple>>> callbacksList = new ArrayList<>(numThreads); ExecutorService executorService = Executors.newFixedThreadPool(numThreads); List<Future<?>> futures = new ArrayList<>(numThreads); for (int i = 0; i < numThreads; i++) { List<FutureResultCallback<ServerTuple>> callbacks = new ArrayList<>(); callbacksList.add(callbacks); futures.add(executorService.submit(() -> { while (!dnsSrvRecordMonitorReference.get().isInitialized()) { FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>(); callbacks.add(callback); 
cluster.selectServerAsync(mock(ServerSelector.class), callback); } // Keep going for a little while for (int j = 0; j < 100; j++) { FutureResultCallback<ServerTuple> callback = new FutureResultCallback<>(); callbacks.add(callback); cluster.selectServerAsync(mock(ServerSelector.class), callback); } })); } for (Future<?> future : futures) { future.get(10, SECONDS); } executorService.shutdownNow(); for (List<FutureResultCallback<ServerTuple>> callbacks : callbacksList) { boolean foundFirstNonExceptionResult = false; for (FutureResultCallback<ServerTuple> curCallback : callbacks) { assertFalse(curCallback.wasInvokedMultipleTimes()); assertTrue(curCallback.isDone()); if (!curCallback.isCompletedExceptionally()) { foundFirstNonExceptionResult = true; } if (foundFirstNonExceptionResult) { assertFalse(curCallback.isCompletedExceptionally()); } } } } private void assertServerTupleExpectations(final ServerAddress serverAddress, final ClusterableServer expectedServer, final ServerTuple serverTuple) { assertEquals(expectedServer, serverTuple.getServer()); // Can't just use assertEquals here because the equals method compares lastUpdateTimeNanos property, which won't ever be the same ServerDescription serverDescription = serverTuple.getServerDescription(); assertTrue(serverDescription.isOk()); assertEquals(ServerConnectionState.CONNECTED, serverDescription.getState()); assertEquals(serverAddress, serverDescription.getAddress()); assertEquals(ServerType.LOAD_BALANCER, serverDescription.getType()); } @NotNull private ClusterableServerFactory mockServerFactory(final ServerAddress serverAddress, final ClusterableServer expectedServer) { ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class); when(serverFactory.getSettings()).thenReturn(ServerSettings.builder().build()); when(serverFactory.create(eq(serverAddress), any(), any(), any())).thenReturn(expectedServer); return serverFactory; } @NotNull private ClusterableServerFactory mockServerFactory() { 
ClusterableServerFactory serverFactory = mock(ClusterableServerFactory.class); when(serverFactory.getSettings()).thenReturn(ServerSettings.builder().build()); return serverFactory; } private static class TestDnsSrvRecordMonitor implements DnsSrvRecordMonitor { private final DnsSrvRecordInitializer initializer; private Duration sleepTime; private Thread thread; private Collection<ServerAddress> hosts; private MongoException exception; private volatile boolean initialized; TestDnsSrvRecordMonitor(final DnsSrvRecordInitializer initializer) { this.initializer = initializer; sleepTime = Duration.ofMillis(50); hosts = Collections.singletonList(new ServerAddress("host1")); } TestDnsSrvRecordMonitor sleepTime(final Duration sleepTime) { this.sleepTime = sleepTime; return this; } TestDnsSrvRecordMonitor hosts(final Collection<ServerAddress> hosts) { this.hosts = hosts; return this; } public TestDnsSrvRecordMonitor exception(final MongoException exception) { this.exception = exception; return this; } public boolean isInitialized() { return initialized; } @Override public void start() { thread = new Thread(() -> { try { Thread.sleep(sleepTime.toMillis()); if (exception != null) { initializer.initialize(exception); } else { initializer.initialize(hosts); } initialized = true; } catch (InterruptedException e) { // ignore } }); thread.start(); } @Override public void close() { if (thread != null) { thread.interrupt(); } } } }
Update broken Mockito tests JAVA-4239
driver-core/src/test/unit/com/mongodb/internal/connection/LoadBalancedClusterTest.java
Update broken Mockito tests
Java
apache-2.0
8d7aab06c0935116ab7f5df6039f03ca1d6ec137
0
redhat-iot/cloudera-iot-demo,redhat-iot/cloudera-iot-demo,redhat-iot/cloudera-iot-demo,redhat-iot/cloudera-iot-demo
gateway/com.redhat.iot.cloudera.demo.simulator/src/main/java/com/redhat/iot/demo/jpmml/ModelRouter.java
package com.redhat.iot.demo.jpmml; import org.apache.camel.*; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.dataformat.csv.CsvDataFormat; import org.eclipse.kura.camel.cloud.KuraCloudComponent; import org.eclipse.kura.camel.component.Configuration; import org.eclipse.kura.camel.runner.CamelRunner; import org.eclipse.kura.camel.runner.ServiceConsumer; import org.eclipse.kura.cloud.CloudService; import org.eclipse.kura.configuration.ConfigurableComponent; import org.eclipse.kura.message.KuraPayload; import org.osgi.framework.BundleContext; import org.osgi.framework.Constants; import org.osgi.framework.FrameworkUtil; import org.osgi.framework.InvalidSyntaxException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.eclipse.kura.camel.component.Configuration.*; /** * Example of the Kura Camel application. */ public class ModelRouter implements ConfigurableComponent { private static final Logger logger = LoggerFactory.getLogger(ModelRouter.class); private static String KURA = "cloud:"; private static String TOPIC = "simulator-test/assets"; private static Map<String, String> machineState; private static Processor kuraProcessor = new KuraProcessor(); private Map<String, Object> properties = null; static { machineState = new HashMap<>(); machineState.put("machine-1", "normal"); machineState.put("machine-2", "normal"); machineState.put("machine-3", "normal"); } /** * A RouterBuilder instance which has no routes */ private static final RouteBuilder NO_ROUTES = new RouteBuilder() { @Override public void configure() throws Exception { } }; private CamelRunner camel; private String cloudServiceFilter; public void start(final Map<String, Object> properties) throws Exception { logger.info("Start: {}", properties); this.properties = properties; // create new filter and instance final String cloudServiceFilter = 
makeCloudServiceFilter(properties); this.camel = createCamelRunner(cloudServiceFilter); // set routes this.camel.setRoutes(fromProperties(properties)); // start this.camel.start(); } public void updated(final Map<String, Object> properties) throws Exception { logger.info("Updating: {}", properties); final String cloudServiceFilter = makeCloudServiceFilter(properties); if (!this.cloudServiceFilter.equals(cloudServiceFilter)) { // update the routes and the filter // stop the camel context first this.camel.stop(); // create a new camel runner, with new dependencies this.camel = createCamelRunner(cloudServiceFilter); // set the routes this.camel.setRoutes(fromProperties(properties)); // and restart again this.camel.start(); } else { // only update the routes, this is done without restarting the context this.camel.setRoutes(fromProperties(properties)); } } public void stop() throws Exception { if (this.camel != null) { this.camel.stop(); this.camel = null; } } private CamelRunner createCamelRunner(final String fullFilter) throws InvalidSyntaxException { final BundleContext ctx = FrameworkUtil.getBundle(ModelRouter.class).getBundleContext(); this.cloudServiceFilter = fullFilter; // create a new camel CamelRunner.Builder final CamelRunner.Builder builder = new CamelRunner.Builder(); // add service dependency builder.dependOn(ctx, FrameworkUtil.createFilter(fullFilter), new ServiceConsumer<CloudService, CamelContext>() { @Override public void consume(final CamelContext context, final CloudService service) { context.addComponent("cloud", new KuraCloudComponent(context, service)); } }); // return un-started instance return builder.build(); } /** * Construct an OSGi filter for a cloud service instance * * @param properties * the properties to read from * @return the OSGi filter selecting the cloud service instance */ private static String makeCloudServiceFilter(final Map<String, Object> properties) { final String filterPid = Configuration.asStringNotEmpty(properties, 
"cloudService", "org.eclipse.kura.cloud.CloudService"); final String fullFilter = String.format("(&(%s=%s)(kura.service.pid=%s))", Constants.OBJECTCLASS, CloudService.class.getName(), filterPid); return fullFilter; } /** * Create a new RouteBuilder instance from the properties * * @param properties * the properties to read from * @return the new instance of RouteBuilder */ protected RouteBuilder fromProperties(final Map<String, Object> properties) { if (!asBoolean(properties, "enabled")) { return NO_ROUTES; } // Totally disable until we figure out if this is even needed return NO_ROUTES; /* return new RouteBuilder() { @Override public void configure() throws Exception { // This needs to be converted to MQTT from("kafka:model?brokers=34.212.173.140:9092&groupId=kapua_test") .to("log:model") .process(exchange -> { System.out.println("FIRST" + "\n"); String messageKey = ""; if (exchange.getIn() != null) { Message message = exchange.getIn(); Integer partitionId = (Integer) message .getHeader(KafkaConstants.PARTITION); String topicName = (String) message .getHeader(KafkaConstants.TOPIC); if (message.getHeader(KafkaConstants.KEY) != null) messageKey = (String) message .getHeader(KafkaConstants.KEY); Object data = message.getBody(); System.out.println("topicName :: " + topicName + " partitionId :: " + partitionId + " messageKey :: " + messageKey + " message :: " + data + "\n"); } }).to("log:model"); } }; */ } private static String getMachineFromTopic(String in) { int begin = in.indexOf("machines") + 9; int end = in.indexOf("/", begin); return in.substring(begin, end); } private static String getFacilityFromTopic(String in) { int begin = in.indexOf("facilities") + 11; int end = in.indexOf("/", begin); return in.substring(begin, end); } private static class KuraProcessor implements Processor { @Override public void process(Exchange exchange) { KuraPayload payload = new KuraPayload(); payload.setTimestamp(new Date()); List<Map> metrics = (List<Map>) 
exchange.getIn().getBody(); Map<String, String> map = metrics.get(0); //Each line of the file produces a map of name/value pairs, but we only get one line at a time due to the splitter above for (Map.Entry<String, String> entry : map.entrySet()) { if (!entry.getKey().equalsIgnoreCase("motorid")) { payload.addMetric(entry.getKey(), Double.parseDouble(entry.getValue())); } } exchange.getIn().setBody(payload); } } public class BadDataRoutebuilder extends RouteBuilder { private final String machine; private String mode; public BadDataRoutebuilder(String machine, String mode) { this.machine = machine; this.mode = mode; } public void configure() { // Bad Rotor Locked Data from(asString(properties, "filespec.bad") + mode + "?include=" + machine + ".csv&" + asString(properties, "filespec.options")) //.noAutoStartup()//.threads(12) //Poll for file .routeId(this.machine + this.mode) .split().tokenize("\\n")//.streaming() .setHeader("demo.machine", simple("${file:name.noext}")) .process(exchange -> exchange.getIn().setHeader("demo.machineState", machineState.get(exchange.getIn().getHeader("demo.machine")))) .delay(asLong(properties, "interval")).asyncDelayed() //Delay 1 second between processing lines .choice() .when(header("demo.machineState").isEqualTo(mode)) .unmarshal(new CsvDataFormat() .setIgnoreEmptyLines(true) .setUseMaps(true) .setCommentMarker('#') .setHeader(new String[]{"timestamp", "motorid", "speed", "voltage", "current", "temp", "noise", "vibration"})) .process(kuraProcessor) .toD("cloud:" + asString(properties, "topic.prefix") + "/${file:name.noext}"); } } /* private static class ControlMessageProcessor implements Processor { @Override public void process(Exchange exchange) throws Exception { if exchange.getContext().route } } */ }
Delete stray class Signed-off-by: Chris Custine <[email protected]>
gateway/com.redhat.iot.cloudera.demo.simulator/src/main/java/com/redhat/iot/demo/jpmml/ModelRouter.java
Delete stray class
Java
apache-2.0
0242904f1847a32736232bcaaf568f9054d98a0e
0
realityforge/arez,realityforge/arez,realityforge/arez
package org.realityforge.arez.processor; import java.util.List; import javax.annotation.Nonnull; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.NestingKind; import javax.lang.model.element.PackageElement; import javax.lang.model.element.TypeElement; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import javax.lang.model.util.Types; import org.realityforge.arez.annotations.Action; import org.realityforge.arez.annotations.Computed; import org.realityforge.arez.annotations.Container; import org.realityforge.arez.annotations.ContainerId; import org.realityforge.arez.annotations.Observable; final class ContainerDescriptorParser { private ContainerDescriptorParser() { } static ContainerDescriptor parse( @Nonnull final Element element, @Nonnull final Elements elementUtils, @Nonnull final Types typeUtils ) throws ArezProcessorException { final PackageElement packageElement = elementUtils.getPackageOf( element ); final TypeElement typeElement = (TypeElement) element; if ( ElementKind.CLASS != element.getKind() ) { throw new ArezProcessorException( "@Container target must be a class", element ); } else if ( element.getModifiers().contains( Modifier.ABSTRACT ) ) { throw new ArezProcessorException( "@Container target must not be abstract", element ); } else if ( element.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@Container target must not be final", element ); } else if ( NestingKind.TOP_LEVEL != typeElement.getNestingKind() && !element.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@Container target must not be a non-static nested class", element ); } final Container container = typeElement.getAnnotation( Container.class ); final String name = container.name().equals( 
"<default>" ) ? typeElement.getSimpleName().toString() : container.name(); final ContainerDescriptor descriptor = new ContainerDescriptor( name, container.singleton(), packageElement, typeElement ); final List<ExecutableElement> methods = ProcessorUtil.getMethods( typeElement ); for ( final ExecutableElement method : methods ) { processMethod( descriptor, method ); } //TODO: Validate observers/populate here //TODO: Validate that there is no name collision between Action/Observable/Computed methods if ( descriptor.getObservables().isEmpty() && descriptor.getActions().isEmpty() ) { throw new ArezProcessorException( "@Container target has no methods annotated with @Action or @Observable", typeElement ); } return descriptor; } private static void processMethod( @Nonnull final ContainerDescriptor descriptor, @Nonnull final ExecutableElement method ) throws ArezProcessorException { final Action action = method.getAnnotation( Action.class ); final Observable observable = method.getAnnotation( Observable.class ); final Computed computed = method.getAnnotation( Computed.class ); final ContainerId containerId = method.getAnnotation( ContainerId.class ); if ( null != action && null != observable ) { throw new ArezProcessorException( "Method can not be annotated with both @Action and @Observable", method ); } else if ( null != action && null != computed ) { throw new ArezProcessorException( "Method can not be annotated with both @Action and @Computed", method ); } else if ( null != action && null != containerId ) { throw new ArezProcessorException( "Method can not be annotated with both @Action and @ContainerId", method ); } else if ( null != observable && null != computed ) { throw new ArezProcessorException( "Method can not be annotated with both @Observable and @Computed", method ); } else if ( null != observable && null != containerId ) { throw new ArezProcessorException( "Method can not be annotated with both @Observable and @ContainerId", method ); } else if ( null != 
containerId && null != computed ) { throw new ArezProcessorException( "Method can not be annotated with both @ContainerId and @Computed", method ); } if ( null != observable ) { processObservable( descriptor, observable, method ); } else if ( null != action ) { processAction( descriptor, action, method ); } else if ( null != computed ) { processComputed( descriptor, computed, method ); } else if ( null != containerId ) { processContainerId( descriptor, containerId, method ); } } private static void processContainerId( @Nonnull final ContainerDescriptor descriptor, @Nonnull final ContainerId containerId, @Nonnull final ExecutableElement method ) throws ArezProcessorException { if ( descriptor.isSingleton() ) { throw new ArezProcessorException( "@ContainerId must not exist if @Container is a singleton", method ); } else if ( method.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@ContainerId target must not be static", method ); } else if ( method.getModifiers().contains( Modifier.PRIVATE ) ) { throw new ArezProcessorException( "@ContainerId target must not be private", method ); } else if ( !method.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@ContainerId target must be final", method ); } else if ( TypeKind.VOID == method.getReturnType().getKind() ) { throw new ArezProcessorException( "@ContainerId target must return a value", method ); } else if ( 0 != method.getParameters().size() ) { throw new ArezProcessorException( "@ContainerId target must not have any parameters", method ); } final ExecutableElement existing = descriptor.getContainerId(); if ( null != existing ) { throw new ArezProcessorException( "@ContainerId target duplicates existing method named " + existing.getSimpleName(), method ); } else { descriptor.setContainerId( method ); } } private static void processComputed( @Nonnull final ContainerDescriptor descriptor, @Nonnull final Computed computed, @Nonnull final ExecutableElement 
method ) { //TODO: } private static void processAction( @Nonnull final ContainerDescriptor descriptor, @Nonnull final Action annotation, @Nonnull final ExecutableElement method ) throws ArezProcessorException { if ( method.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@Action target must not be final", method ); } else if ( method.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@Action target must not be static", method ); } final String name; if ( annotation.name().equals( "<default>" ) ) { name = method.getSimpleName().toString(); } else { name = annotation.name(); if ( name.isEmpty() || !isJavaIdentifier( name ) ) { throw new ArezProcessorException( "Method annotated with @Action specified invalid name " + name, method ); } } final ActionDescriptor action = descriptor.getAction( name ); if ( null != action ) { throw new ArezProcessorException( "Method annotated with @Action specified name " + name + " that duplicates action defined by method " + action.getAction().getSimpleName(), method ); } else { descriptor.addAction( new ActionDescriptor( name, annotation.mutation(), method ) ); } } private static void processObservable( @Nonnull final ContainerDescriptor descriptor, @Nonnull final Observable annotation, @Nonnull final ExecutableElement method ) throws ArezProcessorException { if ( method.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@Observable target must not be final", method ); } else if ( method.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@Observable target must not be static", method ); } final TypeMirror returnType = method.getReturnType(); final String methodName = method.getSimpleName().toString(); String name; final boolean setter; if ( TypeKind.VOID == returnType.getKind() ) { setter = true; //Should be a setter if ( 1 != method.getParameters().size() ) { throw new ArezProcessorException( "Method annotated 
with @Observable should be a setter or getter", method ); } if ( methodName.startsWith( "set" ) && methodName.length() > 4 && Character.isUpperCase( methodName.charAt( 3 ) ) ) { name = Character.toLowerCase( methodName.charAt( 3 ) ) + methodName.substring( 4 ); } else { name = methodName; } } else { setter = false; //Must be a getter if ( 0 != method.getParameters().size() ) { throw new ArezProcessorException( "Method annotated with @Observable should be a setter or getter", method ); } if ( methodName.startsWith( "get" ) && methodName.length() > 4 && Character.isUpperCase( methodName.charAt( 3 ) ) ) { name = Character.toLowerCase( methodName.charAt( 3 ) ) + methodName.substring( 4 ); } else if ( methodName.startsWith( "is" ) && methodName.length() > 3 && Character.isUpperCase( methodName.charAt( 2 ) ) ) { name = Character.toLowerCase( methodName.charAt( 2 ) ) + methodName.substring( 3 ); } else { name = methodName; } } // Override name if supplied by user if ( !annotation.name().equals( "<default>" ) ) { name = annotation.name(); if ( !name.isEmpty() ) { if ( !isJavaIdentifier( name ) ) { throw new ArezProcessorException( "Method annotated with @Observable specified invalid name " + name, method ); } } } final ObservableDescriptor observable = descriptor.getObservableByName( name ); if ( setter ) { if ( observable.hasSetter() ) { throw new ArezProcessorException( "Method annotated with @Observable defines duplicate setter for " + "observable named " + name, method ); } observable.setSetter( method ); } else { if ( observable.hasGetter() ) { throw new ArezProcessorException( "Method annotated with @Observable defines duplicate getter for " + "observable named " + name, method ); } observable.setGetter( method ); } } private static boolean isJavaIdentifier( @Nonnull final String value ) { if ( !Character.isJavaIdentifierStart( value.charAt( 0 ) ) ) { return false; } else { final int length = value.length(); for ( int i = 1; i < length; i++ ) { if ( 
!Character.isJavaIdentifierPart( value.charAt( i ) ) ) { return false; } } return true; } } }
processor/src/main/java/org/realityforge/arez/processor/ContainerDescriptorParser.java
package org.realityforge.arez.processor; import java.util.List; import javax.annotation.Nonnull; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.NestingKind; import javax.lang.model.element.PackageElement; import javax.lang.model.element.TypeElement; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import javax.lang.model.util.Types; import org.realityforge.arez.annotations.Action; import org.realityforge.arez.annotations.Computed; import org.realityforge.arez.annotations.Container; import org.realityforge.arez.annotations.ContainerId; import org.realityforge.arez.annotations.Observable; final class ContainerDescriptorParser { private ContainerDescriptorParser() { } static ContainerDescriptor parse( @Nonnull final Element element, @Nonnull final Elements elementUtils, @Nonnull final Types typeUtils ) throws ArezProcessorException { final PackageElement packageElement = elementUtils.getPackageOf( element ); final TypeElement typeElement = (TypeElement) element; if ( ElementKind.CLASS != element.getKind() ) { throw new ArezProcessorException( "@Container target must be a class", element ); } else if ( element.getModifiers().contains( Modifier.ABSTRACT ) ) { throw new ArezProcessorException( "@Container target must not be abstract", element ); } else if ( element.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@Container target must not be final", element ); } else if ( NestingKind.TOP_LEVEL != typeElement.getNestingKind() && !element.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@Container target must not be a non-static nested class", element ); } final Container container = typeElement.getAnnotation( Container.class ); final String name = container.name().equals( 
"<default>" ) ? typeElement.getSimpleName().toString() : container.name(); final ContainerDescriptor descriptor = new ContainerDescriptor( name, container.singleton(), packageElement, typeElement ); final List<ExecutableElement> methods = ProcessorUtil.getMethods( typeElement ); for ( final ExecutableElement method : methods ) { processMethod( descriptor, method ); } //TODO: Validate observers/populate here //TODO: Validate that there is no name collision between Action/Observable/Computed methods if ( descriptor.getObservables().isEmpty() && descriptor.getActions().isEmpty() ) { throw new ArezProcessorException( "@Container target has no methods annotated with @Action or @Observable", typeElement ); } return descriptor; } private static void processMethod( @Nonnull final ContainerDescriptor descriptor, @Nonnull final ExecutableElement method ) throws ArezProcessorException { final Action action = method.getAnnotation( Action.class ); final Observable observable = method.getAnnotation( Observable.class ); final Computed computed = method.getAnnotation( Computed.class ); final ContainerId containerId = method.getAnnotation( ContainerId.class ); if ( null != action && null != observable ) { throw new ArezProcessorException( "Method can not be annotated with both @Action and @Observable", method ); } else if ( null != action && null != computed ) { throw new ArezProcessorException( "Method can not be annotated with both @Action and @Computed", method ); } else if ( null != action && null != containerId ) { throw new ArezProcessorException( "Method can not be annotated with both @Action and @ContainerId", method ); } else if ( null != observable && null != computed ) { throw new ArezProcessorException( "Method can not be annotated with both @Observable and @Computed", method ); } else if ( null != observable && null != containerId ) { throw new ArezProcessorException( "Method can not be annotated with both @Observable and @ContainerId", method ); } else if ( null != 
containerId && null != computed ) { throw new ArezProcessorException( "Method can not be annotated with both @ContainerId and @Computed", method ); } if ( null != observable ) { processObservable( descriptor, observable, method ); } else if ( null != action ) { processAction( descriptor, action, method ); } else if ( null != computed ) { processComputed( descriptor, computed, method ); } else if ( null != containerId ) { processContainerId( descriptor, containerId, method ); } } private static void processContainerId( @Nonnull final ContainerDescriptor descriptor, @Nonnull final ContainerId containerId, @Nonnull final ExecutableElement method ) throws ArezProcessorException { if ( descriptor.isSingleton() ) { throw new ArezProcessorException( "@ContainerId must not exist if @Container is a singleton", method ); } else if ( method.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@ContainerId target must not be static", method ); } else if ( method.getModifiers().contains( Modifier.PRIVATE ) ) { throw new ArezProcessorException( "@ContainerId target must not be private", method ); } else if ( !method.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@ContainerId target must be final", method ); } else if ( TypeKind.VOID == method.getReturnType().getKind() ) { throw new ArezProcessorException( "@ContainerId target must return a value", method ); } else if ( 0 != method.getParameters().size() ) { throw new ArezProcessorException( "@ContainerId target must not have any parameters", method ); } final ExecutableElement existing = descriptor.getContainerId(); if ( null != existing ) { throw new ArezProcessorException( "@ContainerId target duplicates existing method named " + existing.getSimpleName(), method ); } else { descriptor.setContainerId( method ); } } private static void processComputed( @Nonnull final ContainerDescriptor descriptor, @Nonnull final Computed computed, @Nonnull final ExecutableElement 
method ) { //TODO: } private static void processAction( @Nonnull final ContainerDescriptor descriptor, @Nonnull final Action annotation, @Nonnull final ExecutableElement method ) throws ArezProcessorException { if ( method.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@Action target must not be final", method ); } else if ( method.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@Action target must not be static", method ); } final TypeMirror returnType = method.getReturnType(); final String name; if ( annotation.name().equals( "<default>" ) ) { name = method.getSimpleName().toString(); } else { name = annotation.name(); if ( name.isEmpty() || !isJavaIdentifier( name ) ) { throw new ArezProcessorException( "Method annotated with @Action specified invalid name " + name, method ); } } final ActionDescriptor action = descriptor.getAction( name ); if ( null != action ) { throw new ArezProcessorException( "Method annotated with @Action specified name " + name + " that duplicates action defined by method " + action.getAction().getSimpleName(), method ); } else { descriptor.addAction( new ActionDescriptor( name, annotation.mutation(), method ) ); } } private static void processObservable( @Nonnull final ContainerDescriptor descriptor, @Nonnull final Observable annotation, @Nonnull final ExecutableElement method ) throws ArezProcessorException { if ( method.getModifiers().contains( Modifier.FINAL ) ) { throw new ArezProcessorException( "@Observable target must not be final", method ); } else if ( method.getModifiers().contains( Modifier.STATIC ) ) { throw new ArezProcessorException( "@Observable target must not be static", method ); } final TypeMirror returnType = method.getReturnType(); final String methodName = method.getSimpleName().toString(); String name; final boolean setter; if ( TypeKind.VOID == returnType.getKind() ) { setter = true; //Should be a setter if ( 1 != method.getParameters().size() ) { 
throw new ArezProcessorException( "Method annotated with @Observable should be a setter or getter", method ); } if ( methodName.startsWith( "set" ) && methodName.length() > 4 && Character.isUpperCase( methodName.charAt( 3 ) ) ) { name = Character.toLowerCase( methodName.charAt( 3 ) ) + methodName.substring( 4 ); } else { name = methodName; } } else { setter = false; //Must be a getter if ( 0 != method.getParameters().size() ) { throw new ArezProcessorException( "Method annotated with @Observable should be a setter or getter", method ); } if ( methodName.startsWith( "get" ) && methodName.length() > 4 && Character.isUpperCase( methodName.charAt( 3 ) ) ) { name = Character.toLowerCase( methodName.charAt( 3 ) ) + methodName.substring( 4 ); } else if ( methodName.startsWith( "is" ) && methodName.length() > 3 && Character.isUpperCase( methodName.charAt( 2 ) ) ) { name = Character.toLowerCase( methodName.charAt( 2 ) ) + methodName.substring( 3 ); } else { name = methodName; } } // Override name if supplied by user if ( !annotation.name().equals( "<default>" ) ) { name = annotation.name(); if ( !name.isEmpty() ) { if ( !isJavaIdentifier( name ) ) { throw new ArezProcessorException( "Method annotated with @Observable specified invalid name " + name, method ); } } } final ObservableDescriptor observable = descriptor.getObservableByName( name ); if ( setter ) { if ( observable.hasSetter() ) { throw new ArezProcessorException( "Method annotated with @Observable defines duplicate setter for " + "observable named " + name, method ); } observable.setSetter( method ); } else { if ( observable.hasGetter() ) { throw new ArezProcessorException( "Method annotated with @Observable defines duplicate getter for " + "observable named " + name, method ); } observable.setGetter( method ); } } private static boolean isJavaIdentifier( @Nonnull final String value ) { if ( !Character.isJavaIdentifierStart( value.charAt( 0 ) ) ) { return false; } else { final int length = value.length(); for ( 
int i = 1; i < length; i++ ) { if ( !Character.isJavaIdentifierPart( value.charAt( i ) ) ) { return false; } } return true; } } }
Remove unused line
processor/src/main/java/org/realityforge/arez/processor/ContainerDescriptorParser.java
Remove unused line
Java
apache-2.0
59c538903187686d858e8a6513ac156e4a54acd8
0
bright-tools/androidphotobackup
package com.brightsilence.dev.androidphotobackup; import android.app.AlarmManager; import android.app.PendingIntent; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.preference.PreferenceManager; import android.support.v4.content.WakefulBroadcastReceiver; import android.util.Log; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.concurrent.TimeUnit; /** * When the alarm fires, this WakefulBroadcastReceiver receives the broadcast Intent * and then starts the IntentService {@code SampleSchedulingService} to do some work. */ public class PhotoBackupAlarmReceiver extends WakefulBroadcastReceiver { public static final String TAG = "PhotoBackup::PhotoBackupAlarmReceiver"; // The app's AlarmManager, which provides access to the system alarm services. private AlarmManager alarmMgr; // The pending intent that is triggered when the alarm fires. private PendingIntent alarmIntent; @Override public void onReceive(Context context, Intent intent) { Log.d(TAG,"onReceive - Alarm triggered"); // BEGIN_INCLUDE(alarm_onreceive) /* * If your receiver intent includes extras that need to be passed along to the * service, use setComponent() to indicate that the service should handle the * receiver's intent. For example: * * ComponentName comp = new ComponentName(context.getPackageName(), * MyService.class.getName()); * * // This intent passed in this call will include the wake lock extra as well as * // the receiver intent contents. * startWakefulService(context, (intent.setComponent(comp))); * * In this example, we simply create a new intent to deliver to the service. * This intent holds an extra identifying the wake lock. */ Intent service = new Intent(context, PhotoBackupService.class); // Start the service, keeping the device awake while it is launching. 
startWakefulService(context, service); // END_INCLUDE(alarm_onreceive) } // BEGIN_INCLUDE(set_alarm) /** * Sets a repeating alarm that runs once a day at approximately 8:30 a.m. When the * alarm fires, the app broadcasts an Intent to this WakefulBroadcastReceiver. * @param context */ public void setAlarm(Context context) { alarmMgr = (AlarmManager)context.getSystemService(Context.ALARM_SERVICE); Intent intent = new Intent(context, PhotoBackupAlarmReceiver.class); alarmIntent = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); long alarmTime = sharedPreferences.getLong("backup_trigger_time", 0 ); // Is the alarm in the past? If so, setting it will trigger an alarm straight away, // which we don't want, so push the time out by a day. while( alarmTime < System.currentTimeMillis() ) { alarmTime += AlarmManager.INTERVAL_DAY; } // TODO: Remove me // TODO: This is here for testing only - the alarm will trigger immediately as it's set in the past. alarmTime = System.currentTimeMillis()-6000; Log.d(TAG, "Alarm set for: " + alarmTime+" (now: "+System.currentTimeMillis()+")"); /* * If you don't have precise time requirements, use an inexact repeating alarm * the minimize the drain on the device battery. * * The call below specifies the alarm type, the trigger time, the interval at * which the alarm is fired, and the alarm's associated PendingIntent. * It uses the alarm type RTC_WAKEUP ("Real Time Clock" wake up), which wakes up * the device and triggers the alarm according to the time of the device's clock. * * Alternatively, you can use the alarm type ELAPSED_REALTIME_WAKEUP to trigger * an alarm based on how much time has elapsed since the device was booted. This * is the preferred choice if your alarm is based on elapsed time--for example, if * you simply want your alarm to fire every 60 minutes. 
You only need to use * RTC_WAKEUP if you want your alarm to fire at a particular date/time. Remember * that clock-based time may not translate well to other locales, and that your * app's behavior could be affected by the user changing the device's time setting. * * Here are some examples of ELAPSED_REALTIME_WAKEUP: * * // Wake up the device to fire a one-time alarm in one minute. * alarmMgr.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, * SystemClock.elapsedRealtime() + * 60*1000, alarmIntent); * * // Wake up the device to fire the alarm in 30 minutes, and every 30 minutes * // after that. * alarmMgr.setInexactRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, * AlarmManager.INTERVAL_HALF_HOUR, * AlarmManager.INTERVAL_HALF_HOUR, alarmIntent); */ // Set the alarm to fire at approximately 8:30 a.m., according to the device's // clock, and to repeat once a day. alarmMgr.setInexactRepeating(AlarmManager.RTC_WAKEUP, alarmTime, AlarmManager.INTERVAL_DAY, alarmIntent); // Enable {@code PhotoBackupServiceStarter} to automatically restart the alarm when the // device is rebooted. ComponentName receiver = new ComponentName(context, PhotoBackupServiceStarter.class); PackageManager pm = context.getPackageManager(); pm.setComponentEnabledSetting(receiver, PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP); } // END_INCLUDE(set_alarm) /** * Cancels the alarm. * @param context */ // BEGIN_INCLUDE(cancel_alarm) public void cancelAlarm(Context context) { // If the alarm has been set, cancel it. alarmMgr = (AlarmManager)context.getSystemService(Context.ALARM_SERVICE); Intent intent = new Intent(context, PhotoBackupAlarmReceiver.class); alarmIntent = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_NO_CREATE); if (alarmIntent!= null) { alarmMgr.cancel(alarmIntent); } // Disable {@code PhotoBackupServiceStarter} so that it doesn't automatically restart the // alarm when the device is rebooted. 
ComponentName receiver = new ComponentName(context, PhotoBackupServiceStarter.class); PackageManager pm = context.getPackageManager(); pm.setComponentEnabledSetting(receiver, PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); } // END_INCLUDE(cancel_alarm) }
app/src/main/java/com/brightsilence/dev/androidphotobackup/PhotoBackupAlarmReceiver.java
package com.brightsilence.dev.androidphotobackup; import android.app.AlarmManager; import android.app.PendingIntent; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.preference.PreferenceManager; import android.support.v4.content.WakefulBroadcastReceiver; import android.util.Log; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.concurrent.TimeUnit; /** * When the alarm fires, this WakefulBroadcastReceiver receives the broadcast Intent * and then starts the IntentService {@code SampleSchedulingService} to do some work. */ public class PhotoBackupAlarmReceiver extends WakefulBroadcastReceiver { public static final String TAG = "PhotoBackup::PhotoBackupAlarmReceiver"; // The app's AlarmManager, which provides access to the system alarm services. private AlarmManager alarmMgr; // The pending intent that is triggered when the alarm fires. private PendingIntent alarmIntent; @Override public void onReceive(Context context, Intent intent) { Log.d(TAG,"onReceive - Alarm triggered"); // BEGIN_INCLUDE(alarm_onreceive) /* * If your receiver intent includes extras that need to be passed along to the * service, use setComponent() to indicate that the service should handle the * receiver's intent. For example: * * ComponentName comp = new ComponentName(context.getPackageName(), * MyService.class.getName()); * * // This intent passed in this call will include the wake lock extra as well as * // the receiver intent contents. * startWakefulService(context, (intent.setComponent(comp))); * * In this example, we simply create a new intent to deliver to the service. * This intent holds an extra identifying the wake lock. */ Intent service = new Intent(context, PhotoBackupService.class); // Start the service, keeping the device awake while it is launching. 
startWakefulService(context, service); // END_INCLUDE(alarm_onreceive) } // BEGIN_INCLUDE(set_alarm) /** * Sets a repeating alarm that runs once a day at approximately 8:30 a.m. When the * alarm fires, the app broadcasts an Intent to this WakefulBroadcastReceiver. * @param context */ public void setAlarm(Context context) { alarmMgr = (AlarmManager)context.getSystemService(Context.ALARM_SERVICE); Intent intent = new Intent(context, PhotoBackupAlarmReceiver.class); alarmIntent = PendingIntent.getBroadcast(context, 0, intent, 0); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); long alarmTime = sharedPreferences.getLong("backup_trigger_time", 0 ); // Is the alarm in the past? If so, setting it will trigger an alarm straight away, // which we don't want, so push the time out by a day. while( alarmTime < System.currentTimeMillis() ) { alarmTime += AlarmManager.INTERVAL_DAY; } // TODO: Remove me // TODO: This is here for testing only - the alarm will trigger immediately as it's set in the past. //alarmTime = System.currentTimeMillis()-6000; Log.d(TAG, "Alarm set for: " + alarmTime+" (now: "+System.currentTimeMillis()+")"); /* * If you don't have precise time requirements, use an inexact repeating alarm * the minimize the drain on the device battery. * * The call below specifies the alarm type, the trigger time, the interval at * which the alarm is fired, and the alarm's associated PendingIntent. * It uses the alarm type RTC_WAKEUP ("Real Time Clock" wake up), which wakes up * the device and triggers the alarm according to the time of the device's clock. * * Alternatively, you can use the alarm type ELAPSED_REALTIME_WAKEUP to trigger * an alarm based on how much time has elapsed since the device was booted. This * is the preferred choice if your alarm is based on elapsed time--for example, if * you simply want your alarm to fire every 60 minutes. 
You only need to use * RTC_WAKEUP if you want your alarm to fire at a particular date/time. Remember * that clock-based time may not translate well to other locales, and that your * app's behavior could be affected by the user changing the device's time setting. * * Here are some examples of ELAPSED_REALTIME_WAKEUP: * * // Wake up the device to fire a one-time alarm in one minute. * alarmMgr.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, * SystemClock.elapsedRealtime() + * 60*1000, alarmIntent); * * // Wake up the device to fire the alarm in 30 minutes, and every 30 minutes * // after that. * alarmMgr.setInexactRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, * AlarmManager.INTERVAL_HALF_HOUR, * AlarmManager.INTERVAL_HALF_HOUR, alarmIntent); */ // Set the alarm to fire at approximately 8:30 a.m., according to the device's // clock, and to repeat once a day. alarmMgr.setInexactRepeating(AlarmManager.RTC_WAKEUP, alarmTime, AlarmManager.INTERVAL_DAY, alarmIntent); // Enable {@code PhotoBackupServiceStarter} to automatically restart the alarm when the // device is rebooted. ComponentName receiver = new ComponentName(context, PhotoBackupServiceStarter.class); PackageManager pm = context.getPackageManager(); pm.setComponentEnabledSetting(receiver, PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP); } // END_INCLUDE(set_alarm) /** * Cancels the alarm. * @param context */ // BEGIN_INCLUDE(cancel_alarm) public void cancelAlarm(Context context) { // If the alarm has been set, cancel it. alarmMgr = (AlarmManager)context.getSystemService(Context.ALARM_SERVICE); Intent intent = new Intent(context, PhotoBackupAlarmReceiver.class); alarmIntent = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_NO_CREATE); if (alarmIntent!= null) { alarmMgr.cancel(alarmIntent); } // Disable {@code PhotoBackupServiceStarter} so that it doesn't automatically restart the // alarm when the device is rebooted. 
ComponentName receiver = new ComponentName(context, PhotoBackupServiceStarter.class); PackageManager pm = context.getPackageManager(); pm.setComponentEnabledSetting(receiver, PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); } // END_INCLUDE(cancel_alarm) }
Cancel existing alarm when using setAlarm(). Seems to have more robust behaviour when re-scheduling an alarm when a previous one already existed.
app/src/main/java/com/brightsilence/dev/androidphotobackup/PhotoBackupAlarmReceiver.java
Cancel existing alarm when using setAlarm(). Seems to have more robust behaviour when re-scheduling an alarm when a previous one already existed.
Java
apache-2.0
4320bbc6632bd37aff2030a0a71003a344b2c2db
0
dsyer/spring-cloud-sleuth,marcingrzejszczak/spring-cloud-sleuth,marcingrzejszczak/spring-cloud-sleuth,spring-cloud/spring-cloud-sleuth,spring-cloud/spring-cloud-sleuth,marcingrzejszczak/spring-cloud-sleuth,marcingrzejszczak/spring-cloud-sleuth,spring-cloud/spring-cloud-sleuth,dsyer/spring-cloud-sleuth,spring-cloud/spring-cloud-sleuth,spring-cloud/spring-cloud-sleuth
package org.springframework.cloud.sleuth; import java.io.Closeable; import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; import org.springframework.cloud.sleuth.event.SpanStoppedEvent; import org.springframework.context.ApplicationEventPublisher; /** * @author Spencer Gibb */ @Value @NonFinal public class TraceScope implements Closeable { private final ApplicationEventPublisher publisher; /** * the span for this scope */ private final Span span; /** * the span that was "current" before this scope was entered */ private final Span savedSpan; @NonFinal private boolean detached = false; public TraceScope(ApplicationEventPublisher publisher, Span span, Span savedSpan) { this.publisher = publisher; this.span = span; this.savedSpan = savedSpan; } /** * Remove this span as the current thread, but don't stop it yet or * send it for collection. This is useful if the span object is then * passed to another thread for use with Trace.continueTrace(). * * @return the same Span object */ public Span detach() { if (detached) { Utils.error("Tried to detach trace span " + span + " but " + "it has already been detached."); } detached = true; Span cur = TraceContextHolder.getCurrentSpan(); if (cur != span) { Utils.error("Tried to detach trace span " + span + " but " + "it is not the current span for the " + Thread.currentThread().getName() + " thread. You have " + "probably forgotten to close or detach " + cur); } else { TraceContextHolder.setCurrentSpan(savedSpan); } return span; } @Override @SneakyThrows public void close() { if (detached) { return; } detached = true; Span cur = TraceContextHolder.getCurrentSpan(); if (cur != span) { Utils.error("Tried to close trace span " + span + " but " + "it is not the current span for the " + Thread.currentThread().getName() + " thread. 
You have " + "probably forgotten to close or detach " + cur); } else { span.stop(); this.publisher.publishEvent(new SpanStoppedEvent(this, span)); TraceContextHolder.setCurrentSpan(savedSpan); } } }
spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/TraceScope.java
package org.springframework.cloud.sleuth; import java.io.Closeable; import lombok.Data; import lombok.SneakyThrows; import org.springframework.cloud.sleuth.event.SpanStoppedEvent; import org.springframework.context.ApplicationEventPublisher; /** * @author Spencer Gibb */ @Data public class TraceScope implements Closeable { private final ApplicationEventPublisher publisher; /** * the span for this scope */ private final Span span; /** * the span that was "current" before this scope was entered */ private final Span savedSpan; private boolean detached = false; public TraceScope(ApplicationEventPublisher publisher, Span span, Span savedSpan) { this.publisher = publisher; this.span = span; this.savedSpan = savedSpan; } /** * Remove this span as the current thread, but don't stop it yet or * send it for collection. This is useful if the span object is then * passed to another thread for use with Trace.continueTrace(). * * @return the same Span object */ public Span detach() { if (detached) { Utils.error("Tried to detach trace span " + span + " but " + "it has already been detached."); } detached = true; Span cur = TraceContextHolder.getCurrentSpan(); if (cur != span) { Utils.error("Tried to detach trace span " + span + " but " + "it is not the current span for the " + Thread.currentThread().getName() + " thread. You have " + "probably forgotten to close or detach " + cur); } else { TraceContextHolder.setCurrentSpan(savedSpan); } return span; } @Override @SneakyThrows public void close() { if (detached) { return; } detached = true; Span cur = TraceContextHolder.getCurrentSpan(); if (cur != span) { Utils.error("Tried to close trace span " + span + " but " + "it is not the current span for the " + Thread.currentThread().getName() + " thread. You have " + "probably forgotten to close or detach " + cur); } else { span.stop(); this.publisher.publishEvent(new SpanStoppedEvent(this, span)); TraceContextHolder.setCurrentSpan(savedSpan); } } }
removed setters
spring-cloud-sleuth-core/src/main/java/org/springframework/cloud/sleuth/TraceScope.java
removed setters
Java
apache-2.0
4c687ccf4713a137b799b87541fd5c9e744f11cb
0
arrayexpress/ae-interface,arrayexpress/ae-interface,arrayexpress/ae-interface
package uk.ac.ebi.arrayexpress.utils.saxon.functions.saxon; /* * Copyright 2009-2014 European Molecular Biology Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import net.sf.saxon.Controller; import net.sf.saxon.event.Builder; import net.sf.saxon.event.Receiver; import net.sf.saxon.event.Sender; import net.sf.saxon.expr.XPathContext; import net.sf.saxon.lib.AugmentedSource; import net.sf.saxon.lib.ExtensionFunctionCall; import net.sf.saxon.lib.ExtensionFunctionDefinition; import net.sf.saxon.lib.NamespaceConstant; import net.sf.saxon.om.*; import net.sf.saxon.trans.XPathException; import net.sf.saxon.value.SequenceType; import net.sf.saxon.value.Whitespace; import nu.validator.htmlparser.common.DoctypeExpectation; import nu.validator.htmlparser.sax.HtmlParser; import org.xml.sax.InputSource; import javax.xml.transform.Source; import javax.xml.transform.sax.SAXSource; import java.io.StringReader; public class ParseHTMLFunction extends ExtensionFunctionDefinition { private static final StructuredQName qName = new StructuredQName("", NamespaceConstant.SAXON, "parse-html"); public StructuredQName getFunctionQName() { return qName; } public int getMinimumNumberOfArguments() { return 1; } public int getMaximumNumberOfArguments() { return 1; } public SequenceType[] getArgumentTypes() { return new SequenceType[]{SequenceType.SINGLE_STRING}; } public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) { return SequenceType.SINGLE_NODE; } public 
ExtensionFunctionCall makeCallExpression() { return new ParseHTMLCall(); } private static class ParseHTMLCall extends ExtensionFunctionCall { private transient HtmlParser parser; @SuppressWarnings("unchecked") public Sequence call( XPathContext context, Sequence[] arguments ) throws XPathException { Controller controller = context.getController(); Item contextItem = context.getContextItem(); String baseURI = null != contextItem && contextItem instanceof NodeInfo ? ((NodeInfo)context.getContextItem()).getBaseURI() : ""; StringReader sr = new StringReader(SequenceTool.getStringValue(arguments[0])); InputSource is = new InputSource(sr); is.setSystemId(baseURI); Source source = new SAXSource(getParser(), is); source.setSystemId(baseURI); Builder b = controller.makeBuilder(); Receiver s = b; source = AugmentedSource.makeAugmentedSource(source); ((AugmentedSource) source).setStripSpace(Whitespace.XSLT); if (controller.getExecutable().stripsInputTypeAnnotations()) { s = controller.getConfiguration().getAnnotationStripper(s); } try { Sender.send(source, s, null); NodeInfo node = b.getCurrentRoot(); b.reset(); return node; } catch (XPathException err) { throw new XPathException(err); } } private HtmlParser getParser() { if (null == parser) { parser = new HtmlParser(); parser.setDoctypeExpectation(DoctypeExpectation.NO_DOCTYPE_ERRORS); parser.setReportingDoctype(false); } return parser; } } }
webapp/src/main/java/uk/ac/ebi/arrayexpress/utils/saxon/functions/saxon/ParseHTMLFunction.java
package uk.ac.ebi.arrayexpress.utils.saxon.functions.saxon;

/*
 * Copyright 2009-2014 European Molecular Biology Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

import net.sf.saxon.Controller;
import net.sf.saxon.event.Builder;
import net.sf.saxon.event.Receiver;
import net.sf.saxon.event.Sender;
import net.sf.saxon.expr.XPathContext;
import net.sf.saxon.lib.AugmentedSource;
import net.sf.saxon.lib.ExtensionFunctionCall;
import net.sf.saxon.lib.ExtensionFunctionDefinition;
import net.sf.saxon.lib.NamespaceConstant;
import net.sf.saxon.om.*;
import net.sf.saxon.trans.XPathException;
import net.sf.saxon.value.SequenceType;
import net.sf.saxon.value.Whitespace;
import nu.validator.htmlparser.common.DoctypeExpectation;
import nu.validator.htmlparser.sax.HtmlParser;
import org.xml.sax.InputSource;

import javax.xml.transform.Source;
import javax.xml.transform.sax.SAXSource;
import java.io.StringReader;

/**
 * Saxon integrated extension function {@code saxon:parse-html($html)}.
 *
 * Accepts one string argument, parses it as HTML using the validator.nu
 * {@link HtmlParser}, and returns the document node of the resulting tree.
 */
public class ParseHTMLFunction extends ExtensionFunctionDefinition {

    // Function name: {http://saxon.sf.net/}parse-html
    private static final StructuredQName qName = new StructuredQName("", NamespaceConstant.SAXON, "parse-html");

    public StructuredQName getFunctionQName() {
        return qName;
    }

    public int getMinimumNumberOfArguments() {
        return 1;
    }

    public int getMaximumNumberOfArguments() {
        return 1;
    }

    public SequenceType[] getArgumentTypes() {
        return new SequenceType[]{SequenceType.SINGLE_STRING};
    }

    public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
        return SequenceType.SINGLE_NODE;
    }

    public ExtensionFunctionCall makeCallExpression() {
        return new ParseHTMLCall();
    }

    private static class ParseHTMLCall extends ExtensionFunctionCall {

        // Created lazily and reused by later invocations of this call object.
        private transient HtmlParser parser;

        /**
         * Parses the string in {@code arguments[0]} into a tree built with the
         * controller's builder; XSLT whitespace stripping is applied.
         *
         * @throws XPathException if building the tree fails
         */
        @SuppressWarnings("unchecked")
        public Sequence call(XPathContext context, Sequence[] arguments) throws XPathException {
            Controller controller = context.getController();
            Item contextItem = context.getContextItem();
            // Inherit the base URI from the context node, when there is one.
            String baseURI = null != contextItem && contextItem instanceof NodeInfo
                    ? ((NodeInfo)context.getContextItem()).getBaseURI()
                    : "";

            StringReader sr = new StringReader(SequenceTool.getStringValue(arguments[0]));
            InputSource is = new InputSource(sr);
            is.setSystemId(baseURI);
            Source source = new SAXSource(getParser(), is);
            source.setSystemId(baseURI);

            Builder b = controller.makeBuilder();
            Receiver s = b;
            // Strip whitespace per the stylesheet's xsl:strip-space declarations.
            source = AugmentedSource.makeAugmentedSource(source);
            ((AugmentedSource) source).setStripSpace(Whitespace.XSLT);
            if (controller.getExecutable().stripsInputTypeAnnotations()) {
                s = controller.getConfiguration().getAnnotationStripper(s);
            }

            try {
                Sender.send(source, s, null);
                NodeInfo node = b.getCurrentRoot();
                b.reset();
                return node;
            } catch (XPathException err) {
                throw new XPathException(err);
            }
        }

        /**
         * Lazily creates the HTML parser.
         *
         * NOTE(review): this version expects an HTML doctype
         * ({@code DoctypeExpectation.HTML}); a later revision relaxes this to
         * {@code NO_DOCTYPE_ERRORS} so doctype-less fragments parse cleanly.
         */
        private HtmlParser getParser() {
            if (null == parser) {
                parser = new HtmlParser();
                parser.setDoctypeExpectation(DoctypeExpectation.HTML);
                parser.setReportingDoctype(false);
            }
            return parser;
        }
    }
}
- ... git-svn-id: d21cf3f823a38adcd0967a5aa33e5b271a1966c2@26533 2913f559-6b04-0410-9a09-c530ee9f5186
webapp/src/main/java/uk/ac/ebi/arrayexpress/utils/saxon/functions/saxon/ParseHTMLFunction.java
- ...
Java
apache-2.0
faf253445c690f96c9c4768b000d61f845e7c93b
0
KurtStam/fabric8,zmhassan/fabric8,chirino/fabric8,dhirajsb/fuse,janstey/fuse-1,chirino/fabric8,punkhorn/fuse,jonathanchristison/fabric8,jludvice/fabric8,punkhorn/fuse,hekonsek/fabric8,zmhassan/fabric8,gnodet/fuse,aslakknutsen/fabric8,janstey/fuse,migue/fabric8,janstey/fabric8,jonathanchristison/fabric8,jimmidyson/fabric8,sobkowiak/fabric8,gashcrumb/fabric8,rhuss/fabric8,janstey/fuse,hekonsek/fabric8,janstey/fabric8,janstey/fuse,opensourceconsultant/fuse,cunningt/fuse,jonathanchristison/fabric8,rnc/fabric8,rmarting/fuse,christian-posta/fabric8,migue/fabric8,chirino/fabric8v2,dhirajsb/fuse,rnc/fabric8,sobkowiak/fuse,avano/fabric8,mwringe/fabric8,punkhorn/fabric8,janstey/fuse,janstey/fuse-1,gashcrumb/fabric8,sobkowiak/fabric8,zmhassan/fabric8,migue/fabric8,EricWittmann/fabric8,gnodet/fuse,chirino/fabric8,christian-posta/fabric8,dejanb/fuse,EricWittmann/fabric8,chirino/fuse,christian-posta/fabric8,hekonsek/fabric8,jimmidyson/fabric8,dhirajsb/fabric8,jludvice/fabric8,dhirajsb/fabric8,punkhorn/fabric8,rhuss/fabric8,EricWittmann/fabric8,avano/fabric8,rhuss/fabric8,chirino/fabric8,punkhorn/fabric8,dhirajsb/fabric8,mwringe/fabric8,rajdavies/fabric8,ffang/fuse-1,ffang/fuse-1,KurtStam/fabric8,rmarting/fuse,rnc/fabric8,zmhassan/fabric8,dejanb/fuse,opensourceconsultant/fuse,tadayosi/fuse,jonathanchristison/fabric8,joelschuster/fuse,rmarting/fuse,sobkowiak/fuse,chirino/fuse,chirino/fabric8v2,jboss-fuse/fuse,PhilHardwick/fabric8,gnodet/fuse,sobkowiak/fabric8,dejanb/fuse,jboss-fuse/fuse,KurtStam/fabric8,chirino/fabric8v2,gnodet/fuse,jimmidyson/fabric8,hekonsek/fabric8,jimmidyson/fabric8,hekonsek/fabric8,avano/fabric8,rajdavies/fabric8,punkhorn/fabric8,janstey/fabric8,opensourceconsultant/fuse,gashcrumb/fabric8,cunningt/fuse,jludvice/fabric8,PhilHardwick/fabric8,chirino/fabric8v2,rajdavies/fabric8,joelschuster/fuse,mwringe/fabric8,gashcrumb/fabric8,PhilHardwick/fabric8,tadayosi/fuse,aslakknutsen/fabric8,PhilHardwick/fabric8,migue/fabric8,jimmidyson/fabric8,joelschuster/fuse,ffang/fu
se-1,christian-posta/fabric8,sobkowiak/fabric8,janstey/fuse-1,aslakknutsen/fabric8,mwringe/fabric8,jboss-fuse/fuse,KurtStam/fabric8,rajdavies/fabric8,rhuss/fabric8,jludvice/fabric8,rnc/fabric8,dhirajsb/fabric8,avano/fabric8,rnc/fabric8,EricWittmann/fabric8
/*
 * Copyright (C) FuseSource, Inc.
 * http://fusesource.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.fabric.service.jclouds;

import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.create;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.exists;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.setData;

import java.util.Dictionary;
import java.util.Enumeration;

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.fusesource.fabric.api.jcip.ThreadSafe;
import org.fusesource.fabric.api.scr.AbstractComponent;
import org.fusesource.fabric.api.scr.ValidatingReference;
import org.fusesource.fabric.zookeeper.ZkPath;
import org.jclouds.karaf.core.Constants;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link ConnectionStateListener} that makes sure that whenever it connects to a new
 * ensemble, it updates it with the cloud provider information that is present in the
 * {@link ConfigurationAdmin}.
 *
 * A typical use case is when creating a cloud ensemble and joining it afterwards: after
 * the join, the new ensemble is updated with the cloud provider information so that the
 * provider does not have to be registered twice.
 *
 * If for any reason the new ensemble already has registered information for a provider,
 * that provider is skipped.
 */
@ThreadSafe
@Component(name = "org.fusesource.fabric.jclouds.bridge", description = "Fabric Jclouds Service Bridge", immediate = true)
@Service(ConnectionStateListener.class)
public final class CloudProviderBridge extends AbstractComponent implements ConnectionStateListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(CloudProviderBridge.class);

    // ConfigAdmin filters selecting jclouds compute / blobstore factory configurations.
    private static final String COMPUTE_FILTER = "(service.factoryPid=org.jclouds.compute)";
    private static final String BLOBSTORE_FILTER = "(service.factoryPid=org.jclouds.blobstore)";

    @Reference(referenceInterface = ConfigurationAdmin.class)
    private final ValidatingReference<ConfigurationAdmin> configAdmin = new ValidatingReference<ConfigurationAdmin>();
    @Reference(referenceInterface = CuratorFramework.class)
    private final ValidatingReference<CuratorFramework> curator = new ValidatingReference<CuratorFramework>();

    @Activate
    void activate(ComponentContext context) {
        activateComponent();
    }

    @Deactivate
    void deactivate() {
        deactivateComponent();
    }

    /**
     * Curator connection callback: on (re)connect, captures the client and pushes the
     * locally configured cloud providers to the ensemble. Only acts while the SCR
     * component is valid (i.e. between activate and deactivate).
     */
    @Override
    public void stateChanged(CuratorFramework client, ConnectionState newState) {
        if (isValid()) {
            switch (newState) {
                case CONNECTED:
                case RECONNECTED:
                    this.curator.set(client);
                    onConnected();
                    break;
                default:
                    onDisconnected();
            }
        }
    }

    private void onConnected() {
        registerServices(COMPUTE_FILTER);
        registerServices(BLOBSTORE_FILTER);
    }

    private void onDisconnected() {
    }

    /**
     * Copies each ConfigAdmin configuration matching {@code filter} into the ensemble's
     * registry, keyed by the provider name. A provider is only written when it carries a
     * name, identity and credential and the curator client is connected; providers whose
     * node already exists are left untouched. Errors are logged, not propagated.
     */
    private void registerServices(String filter) {
        try {
            Configuration[] configurations = configAdmin.get().listConfigurations(filter);
            if (configurations != null) {
                for (Configuration configuration : configurations) {
                    Dictionary properties = configuration.getProperties();
                    if (properties != null) {
                        String name = properties.get(Constants.NAME) != null ? String.valueOf(properties.get(Constants.NAME)) : null;
                        String identity = properties.get(Constants.IDENTITY) != null ? String.valueOf(properties.get(Constants.IDENTITY)) : null;
                        String credential = properties.get(Constants.CREDENTIAL) != null ? String.valueOf(properties.get(Constants.CREDENTIAL)) : null;
                        if (name != null && identity != null && credential != null
                                && curator.get().getZookeeperClient().isConnected()) {
                            // Create the parent node first, then write each property
                            // under it; skip entirely when the node already exists.
                            if (exists(curator.get(), ZkPath.CLOUD_SERVICE.getPath(name)) == null) {
                                create(curator.get(), ZkPath.CLOUD_SERVICE.getPath(name));
                                Enumeration keys = properties.keys();
                                while (keys.hasMoreElements()) {
                                    String key = String.valueOf(keys.nextElement());
                                    String value = String.valueOf(properties.get(key));
                                    // ConfigAdmin bookkeeping keys are not provider data.
                                    if (!key.equals("service.pid") && !key.equals("service.factoryPid")) {
                                        setData(curator.get(), ZkPath.CLOUD_SERVICE_PROPERTY.getPath(name, key), value);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            LOGGER.error("Failed to retrieve compute service information from configuration admin.", e);
        }
    }

    // SCR bind/unbind callbacks for the declared @Reference fields.
    void bindConfigAdmin(ConfigurationAdmin service) {
        this.configAdmin.set(service);
    }

    void unbindConfigAdmin(ConfigurationAdmin service) {
        this.configAdmin.set(null);
    }

    void bindCurator(CuratorFramework curator) {
        this.curator.set(curator);
    }

    void unbindCurator(CuratorFramework curator) {
        this.curator.set(null);
    }
}
fabric/fabric-core-agent-jclouds/src/main/java/org/fusesource/fabric/service/jclouds/CloudProviderBridge.java
/* * Copyright (C) FuseSource, Inc. * http://fusesource.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.fabric.service.jclouds; import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.create; import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.exists; import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.setData; import java.util.Dictionary; import java.util.Enumeration; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.state.ConnectionState; import org.apache.curator.framework.state.ConnectionStateListener; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.fusesource.fabric.api.scr.AbstractComponent; import org.fusesource.fabric.api.scr.ValidatingReference; import org.fusesource.fabric.zookeeper.ZkPath; import org.jclouds.karaf.core.Constants; import org.osgi.service.cm.Configuration; import org.osgi.service.cm.ConfigurationAdmin; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A {@link ConnectionStateListener} that makes sure that whenever it connect to a new ensemble, it updates it with the cloud * provider information that are present in the { * @link ConfigurationAdmin}. 
* * A typical use case is when creating a cloud ensemble and join it afterwards to update it after the join, with the * cloud provider information, so that the provider doesn't have to be registered twice. * * If for any reason the new ensemble already has registered information for a provider, the provider will be skipped. */ @Component(name = "org.fusesource.fabric.jclouds.bridge", description = "Fabric Jclouds Service Bridge", immediate = true) @Service(ConnectionStateListener.class) public class CloudProviderBridge extends AbstractComponent implements ConnectionStateListener { private static final Logger LOGGER = LoggerFactory.getLogger(CloudProviderBridge.class); private static final String COMPUTE_FILTER = "(service.factoryPid=org.jclouds.compute)"; private static final String BLOBSTORE_FILTER = "(service.factoryPid=org.jclouds.blobstore)"; @Reference(referenceInterface = ConfigurationAdmin.class) private final ValidatingReference<ConfigurationAdmin> configAdmin = new ValidatingReference<ConfigurationAdmin>(); @Reference(referenceInterface = CuratorFramework.class) private final ValidatingReference<CuratorFramework> curator = new ValidatingReference<CuratorFramework>(); @Activate synchronized void activate(ComponentContext context) { activateComponent(); } @Deactivate synchronized void deactivate() { deactivateComponent(); } @Override public void stateChanged(CuratorFramework client, ConnectionState newState) { switch (newState) { case CONNECTED: case RECONNECTED: this.curator.set(client); onConnected(); break; default: onDisconnected(); } } public void onConnected() { registerServices(COMPUTE_FILTER); registerServices(BLOBSTORE_FILTER); } public void onDisconnected() { } public void registerServices(String filter) { try { Configuration[] configurations = configAdmin.get().listConfigurations(filter); if (configurations != null) { for (Configuration configuration : configurations) { Dictionary properties = configuration.getProperties(); if (properties != null) 
{ String name = properties.get(Constants.NAME) != null ? String.valueOf(properties.get(Constants.NAME)) : null; String identity = properties.get(Constants.IDENTITY) != null ? String.valueOf(properties.get(Constants.IDENTITY)) : null; String credential = properties.get(Constants.CREDENTIAL) != null ? String.valueOf(properties.get(Constants.CREDENTIAL)) : null; if (name != null && identity != null && credential != null && getCurator().getZookeeperClient().isConnected()) { if (exists(getCurator(), ZkPath.CLOUD_SERVICE.getPath(name)) == null) { create(getCurator(), ZkPath.CLOUD_SERVICE.getPath(name)); Enumeration keys = properties.keys(); while (keys.hasMoreElements()) { String key = String.valueOf(keys.nextElement()); String value = String.valueOf(properties.get(key)); if (!key.equals("service.pid") && !key.equals("service.factoryPid")) { setData(getCurator(), ZkPath.CLOUD_SERVICE_PROPERTY.getPath(name, key), value); } } } } } } } } catch (Exception e) { LOGGER.error("Failed to retrieve compute service information from configuration admin.", e); } } void bindConfigAdmin(ConfigurationAdmin service) { this.configAdmin.set(service); } void unbindConfigAdmin(ConfigurationAdmin service) { this.configAdmin.set(null); } CuratorFramework getCurator() { return curator.get(); } void bindCurator(CuratorFramework curator) { this.curator.set(curator); } void unbindCurator(CuratorFramework curator) { this.curator.set(null); } }
@ThreadSafe CloudProviderBridge
fabric/fabric-core-agent-jclouds/src/main/java/org/fusesource/fabric/service/jclouds/CloudProviderBridge.java
@ThreadSafe CloudProviderBridge
Java
apache-2.0
5ce0e1aa7c77c97f0d68d2aeadbfc5baa8c9f3fa
0
opensingular/singular-server,opensingular/singular-server,opensingular/singular-server
package org.opensingular.server.commons.service;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import javax.inject.Inject;
import javax.transaction.Transactional;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.junit.Assert;
import org.junit.Test;
import org.opensingular.flow.core.ProcessInstance;
import org.opensingular.flow.core.TaskType;
import org.opensingular.form.SIComposite;
import org.opensingular.form.SInstance;
import org.opensingular.form.document.RefSDocumentFactory;
import org.opensingular.form.document.RefType;
import org.opensingular.form.document.SDocumentFactory;
import org.opensingular.form.helpers.AssertionsSInstance;
import org.opensingular.form.persistence.entity.FormTypeEntity;
import org.opensingular.lib.commons.base.SingularException;
import org.opensingular.server.commons.STypeFOO;
import org.opensingular.server.commons.persistence.dao.form.PetitionDAO;
import org.opensingular.server.commons.persistence.dto.PetitionDTO;
import org.opensingular.server.commons.persistence.dto.TaskInstanceDTO;
import org.opensingular.server.commons.persistence.entity.form.PetitionEntity;
import org.opensingular.server.commons.persistence.filter.QuickFilter;
import org.opensingular.server.commons.spring.security.SingularPermission;
import org.opensingular.server.commons.test.FOOFlow;
import org.opensingular.server.commons.test.SingularCommonsBaseTest;
import org.springframework.test.annotation.Rollback;

import static org.junit.Assert.*;

/**
 * Integration tests for {@code PetitionService}: creation, persistence, lookup,
 * deletion, quick search and task listing of petitions.
 *
 * Runs transactionally against the base test context; tests that must not leave
 * data behind are additionally marked {@code @Rollback}.
 */
@Transactional
public class PetitionServiceTest extends SingularCommonsBaseTest {

    @Inject
    public PetitionService<PetitionEntity, PetitionInstance> petitionService;

    @Inject
    public PetitionSender petitionSender;

    @Inject
    public SessionFactory sessionFactory;

    @Inject
    protected PetitionDAO<PetitionEntity> petitionDAO;

    // Sanity check: a FormTypeEntity can be persisted through a raw Hibernate session.
    @Test
    public void testName() throws Exception {
        Session s = sessionFactory.openSession();
        org.hibernate.Transaction t = s.beginTransaction();
        FormTypeEntity formTypeEntity = new FormTypeEntity();
        formTypeEntity.setAbbreviation("nada");
        formTypeEntity.setLabel("nada");
        formTypeEntity.setCacheVersionNumber(1l);
        s.saveOrUpdate(formTypeEntity);
        s.flush();
        t.commit();
    }

    // The service hands out a fresh, non-null petition entity.
    @Test
    public void newPetitionEntity() {
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        assertNotNull(petitionEntity);
    }

    // A petition instance wraps the entity it was created from.
    @Test
    public void newPetitionInstance() {
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        assertNotNull(petitionInstance);
        assertEquals(petitionEntity, petitionInstance.getEntity());
    }

    // Saving a petition persists its main form; the stored form equals the saved instance.
    @Test
    public void saveNewPetition() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        SIComposite mainFormAsInstance = petitionService.getMainFormAsInstance(petitionEntity);
        new AssertionsSInstance(instance).isValueEquals(mainFormAsInstance);
    }

    // Full happy path: save, send through the FOO flow, then execute a transition.
    @Test
    public void sendNewPetition() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        petitionInstance.setProcessDefinition(FOOFlow.class);
        petitionSender.send(petitionInstance, instance, "vinicius.nunes");
        petitionService.executeTransition("No more bar", petitionInstance, null, null);
    }

    // NOTE(review): only exercises that findPetition does not throw; the returned
    // Optional is never asserted on.
    @Test
    public void testFindPetition() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        Optional<PetitionInstance> petition = petitionService.findPetition(petitionInstance.getCod());
    }

    // getPetition by code returns the same underlying entity.
    @Test
    public void testGetPetition() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        PetitionInstance petition = petitionService.getPetition(petitionInstance.getCod());
        Assert.assertEquals(petitionInstance.getEntity(), petition.getEntity());
    }

    // Deleting by code makes the petition unfindable.
    @Test
    public void testDeletePetition() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        petitionService.deletePetition(petitionInstance.getCod());
        Assert.assertFalse(petitionService.findPetition(petitionInstance.getCod()).isPresent());
    }

    // Same as above, but deletion goes through the DTO-based overload.
    @Test
    public void testDeletePetitionWithPetitionDTO() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        PetitionDTO dto = new PetitionDTO();
        dto.setCodPeticao(petitionInstance.getCod());
        petitionService.deletePetition(dto);
        Assert.assertFalse(petitionService.findPetition(petitionInstance.getCod()).isPresent());
    }

    // A petition that was never sent has no current task, hence no transitions.
    @Test
    public void testListCurrentTaskTransitionsWithEmptyTransitions() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        Assert.assertEquals(0, petitionService.listCurrentTaskTransitions(petitionInstance.getCod()).size());
    }

    // Looking up a non-existent code must fail loudly.
    @Test(expected = SingularException.class)
    @Rollback
    public void testGetPetitionException() {
        petitionService.getPetition((long)0);
    }

    /**
     * Quick-search results are partitioned by state: petitions with a process
     * instance count as sent, the rest as drafts; both counts must match what
     * the DAO reports.
     */
    @Test
    public void quickSearchTests() {
        long qtdEnviada = 0;
        long qtdRascunho = 0;
        List<PetitionEntity> petitionEntities = petitionDAO.listAll();
        for (PetitionEntity petitionEntity : petitionEntities) {
            if (petitionEntity.getProcessInstanceEntity() == null) {
                qtdRascunho++;
            } else {
                qtdEnviada++;
            }
        }
        QuickFilter f1 = new QuickFilter();
        List<Map<String, Serializable>> maps1 = petitionService.quickSearchMap(f1);
        assertEquals(qtdEnviada, maps1.size());
        QuickFilter f2 = new QuickFilter();
        f2.withRascunho(true).withSortProperty("description");
        List<Map<String, Serializable>> maps2 = petitionService.quickSearchMap(f2);
        assertEquals(qtdRascunho, maps2.size());
        QuickFilter f3 = new QuickFilter();
        Long count = petitionService.countQuickSearch(f3);
        assertTrue(count == qtdEnviada);
    }

    // A filter matching nothing yields a task count of zero.
    @Test
    @Rollback
    public void countTasks(){
        QuickFilter filter = new QuickFilter();
        filter.withFilter("filter");
        filter.withProcessesAbbreviation(Arrays.asList("task1", "task2"));
        SingularPermission permission = new SingularPermission("singularId", "internalId");
        Assert.assertEquals(new Long(0), petitionService.countTasks(filter, Arrays.asList(permission)));
    }

    /**
     * After sending a petition with a unique description, filtering tasks by
     * that description returns exactly one unallocated task sitting on the
     * "Do bar" step of the FOO flow.
     */
    @Test
    public void listTasks() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        // nanoTime makes the description unique so the filter matches only this petition
        String description = "Descrição XYZ única - " + System.nanoTime();
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionInstance.setDescription(description);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        petitionInstance.setProcessDefinition(FOOFlow.class);
        petitionSender.send(petitionInstance, instance, "vinicius.nunes");
        QuickFilter filter = new QuickFilter();
        filter.withFilter(description);
        List<TaskInstanceDTO> taskInstanceDTOS = petitionService.listTasks(filter, Collections.emptyList());
        assertEquals(1, taskInstanceDTOS.size());
        TaskInstanceDTO task = taskInstanceDTOS.get(0);
        assertNull(task.getCodUsuarioAlocado());
        assertNull(task.getNomeUsuarioAlocado());
        assertEquals("Do bar", task.getTaskName());
        assertEquals(TaskType.PEOPLE, task.getTaskType());
        assertEquals("foooooo.StypeFoo", task.getType());
        assertEquals(description, task.getDescription());
    }

    /**
     * All petition-lookup overloads (by process instance and by current task,
     * via getPetitionInstance and getPetition) must resolve to the same
     * petition code.
     */
    @Test
    public void testSearchs() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        String description = "Descrição XYZ única - " + System.nanoTime();
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity);
        petitionInstance.setDescription(description);
        petitionService.saveOrUpdate(petitionInstance, instance, true);
        petitionInstance.setProcessDefinition(FOOFlow.class);
        petitionSender.send(petitionInstance, instance, "vinicius.nunes");
        ProcessInstance processInstance = petitionInstance.getProcessInstance();
        PetitionInstance p2 = petitionService.getPetitionInstance(processInstance);
        PetitionInstance p3 = petitionService.getPetitionInstance(processInstance.getCurrentTaskOrException());
        PetitionInstance p4 = petitionService.getPetition(processInstance);
        PetitionInstance p5 = petitionService.getPetition(processInstance.getCurrentTaskOrException());
        assertEquals(petitionInstance.getCod(), p2.getCod());
        assertEquals(petitionInstance.getCod(), p3.getCod());
        assertEquals(petitionInstance.getCod(), p4.getCod());
        assertEquals(petitionInstance.getCod(), p5.getCod());
    }

    // A petition created without saving must not receive a persistent code.
    @Test
    public void createPetitionWithoutSave() {
        RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef();
        SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class));
        PetitionEntity petitionEntity = petitionService.newPetitionEntity();
        PetitionInstance parent = petitionService.newPetitionInstance(petitionEntity);
        petitionService.saveOrUpdate(parent, instance, true);
        parent.setProcessDefinition(FOOFlow.class);
        petitionSender.send(parent, instance, "vinicius.nunes");
        PetitionInstance petition = petitionService.createNewPetitionWithoutSave(FOOFlow.class, parent, PetitionInstance::getCod);
        assertNull(petition.getCod());
    }
}
server-libs/server-commons/src/test/java/org/opensingular/server/commons/service/PetitionServiceTest.java
package org.opensingular.server.commons.service; import java.io.Serializable; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import javax.inject.Inject; import javax.transaction.Transactional; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.junit.Assert; import org.junit.Test; import org.opensingular.flow.core.ProcessInstance; import org.opensingular.flow.core.TaskType; import org.opensingular.form.SIComposite; import org.opensingular.form.SInstance; import org.opensingular.form.document.RefSDocumentFactory; import org.opensingular.form.document.RefType; import org.opensingular.form.document.SDocumentFactory; import org.opensingular.form.helpers.AssertionsSInstance; import org.opensingular.form.persistence.entity.FormTypeEntity; import org.opensingular.lib.commons.base.SingularException; import org.opensingular.server.commons.STypeFOO; import org.opensingular.server.commons.persistence.dao.form.PetitionDAO; import org.opensingular.server.commons.persistence.dto.PetitionDTO; import org.opensingular.server.commons.persistence.dto.TaskInstanceDTO; import org.opensingular.server.commons.persistence.entity.form.PetitionEntity; import org.opensingular.server.commons.persistence.filter.QuickFilter; import org.opensingular.server.commons.spring.security.SingularPermission; import org.opensingular.server.commons.test.FOOFlow; import org.opensingular.server.commons.test.SingularCommonsBaseTest; import org.springframework.test.annotation.Rollback; import static org.junit.Assert.*; @Transactional public class PetitionServiceTest extends SingularCommonsBaseTest { @Inject public PetitionService<PetitionEntity, PetitionInstance> petitionService; @Inject public PetitionSender petitionSender; @Inject public SessionFactory sessionFactory; @Inject protected PetitionDAO<PetitionEntity> petitionDAO; @Test public void testName() throws Exception { Session s = 
sessionFactory.openSession(); org.hibernate.Transaction t = s.beginTransaction(); FormTypeEntity formTypeEntity = new FormTypeEntity(); formTypeEntity.setAbbreviation("nada"); formTypeEntity.setLabel("nada"); formTypeEntity.setCacheVersionNumber(1l); s.saveOrUpdate(formTypeEntity); s.flush(); t.commit(); } @Test public void newPetitionEntity() { PetitionEntity petitionEntity = petitionService.newPetitionEntity(); assertNotNull(petitionEntity); } @Test public void newPetitionInstance() { PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); assertNotNull(petitionInstance); assertEquals(petitionEntity, petitionInstance.getEntity()); } @Test public void saveNewPetition() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionService.saveOrUpdate(petitionInstance, instance, true); SIComposite mainFormAsInstance = petitionService.getMainFormAsInstance(petitionEntity); new AssertionsSInstance(instance).isValueEquals(mainFormAsInstance); } @Test public void sendNewPetition() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionService.saveOrUpdate(petitionInstance, instance, true); petitionInstance.setProcessDefinition(FOOFlow.class); petitionSender.send(petitionInstance, instance, "vinicius.nunes"); petitionService.executeTransition("No more bar", petitionInstance, null, null); } @Test 
public void testFindPetition() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionService.saveOrUpdate(petitionInstance, instance, true); Optional<PetitionInstance> petition = petitionService.findPetition(petitionInstance.getCod()); } @Test public void testGetPetition() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionService.saveOrUpdate(petitionInstance, instance, true); PetitionInstance petition = petitionService.getPetition(petitionInstance.getCod()); Assert.assertEquals(petitionInstance.getEntity(), petition.getEntity()); } @Test public void testDeletePetition() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionService.saveOrUpdate(petitionInstance, instance, true); petitionService.deletePetition(petitionInstance.getCod()); Assert.assertFalse(petitionService.findPetition(petitionInstance.getCod()).isPresent()); } @Test public void testDeletePetitionWithPetitionDTO() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); PetitionEntity petitionEntity = 
petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionService.saveOrUpdate(petitionInstance, instance, true); PetitionDTO dto = new PetitionDTO(); dto.setCodPeticao(petitionInstance.getCod()); petitionService.deletePetition(dto); Assert.assertFalse(petitionService.findPetition(petitionInstance.getCod()).isPresent()); } @Test public void testListCurrentTaskTransitionsWithEmptyTransitions() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionService.saveOrUpdate(petitionInstance, instance, true); Assert.assertEquals(0, petitionService.listCurrentTaskTransitions(petitionInstance.getCod()).size()); } @Test(expected = SingularException.class) @Rollback public void testGetPetitionException() { petitionService.getPetition((long)0); } @Test public void quickSearchTests() { long qtdEnviada = 0; long qtdRascunho = 0; List<PetitionEntity> petitionEntities = petitionDAO.listAll(); for (PetitionEntity petitionEntity : petitionEntities) { if (petitionEntity.getProcessInstanceEntity() == null) { qtdRascunho++; } else { qtdEnviada++; } } QuickFilter f1 = new QuickFilter(); List<Map<String, Serializable>> maps1 = petitionService.quickSearchMap(f1); assertEquals(qtdEnviada, maps1.size()); QuickFilter f2 = new QuickFilter(); f2.withRascunho(true).withSortProperty("description"); List<Map<String, Serializable>> maps2 = petitionService.quickSearchMap(f2); assertEquals(qtdRascunho, maps2.size()); QuickFilter f3 = new QuickFilter(); Long count = petitionService.countQuickSearch(f3); assertTrue(count == qtdEnviada); } @Test @Rollback public void countTasks(){ QuickFilter filter = new QuickFilter(); 
filter.withFilter("filter"); filter.withProcessesAbbreviation(Arrays.asList("task1", "task2")); SingularPermission permission = new SingularPermission("singularId", "internalId"); Assert.assertEquals(new Long(0), petitionService.countTasks(filter, Arrays.asList(permission))); } @Test public void listTasks() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); String description = "Descrição XYZ única - " + System.nanoTime(); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionInstance.setDescription(description); petitionService.saveOrUpdate(petitionInstance, instance, true); petitionInstance.setProcessDefinition(FOOFlow.class); petitionSender.send(petitionInstance, instance, "vinicius.nunes"); QuickFilter filter = new QuickFilter(); filter.withFilter(description); List<TaskInstanceDTO> taskInstanceDTOS = petitionService.listTasks(filter, Collections.emptyList()); assertEquals(1, taskInstanceDTOS.size()); TaskInstanceDTO task = taskInstanceDTOS.get(0); assertNull(task.getCodUsuarioAlocado()); assertNull(task.getNomeUsuarioAlocado()); assertEquals("Do bar", task.getTaskName()); assertEquals(TaskType.PEOPLE, task.getTaskType()); assertEquals("foooooo.StypeFoo", task.getType()); assertEquals(description, task.getDescription()); } @Test public void testSearchs() { RefSDocumentFactory documentFactoryRef = SDocumentFactory.empty().getDocumentFactoryRef(); SInstance instance = documentFactoryRef.get().createInstance(RefType.of(STypeFOO.class)); String description = "Descrição XYZ única - " + System.nanoTime(); PetitionEntity petitionEntity = petitionService.newPetitionEntity(); PetitionInstance petitionInstance = petitionService.newPetitionInstance(petitionEntity); petitionInstance.setDescription(description); 
petitionService.saveOrUpdate(petitionInstance, instance, true); petitionInstance.setProcessDefinition(FOOFlow.class); petitionSender.send(petitionInstance, instance, "vinicius.nunes"); ProcessInstance processInstance = petitionInstance.getProcessInstance(); PetitionInstance p2 = petitionService.getPetitionInstance(processInstance); PetitionInstance p3 = petitionService.getPetitionInstance(processInstance.getCurrentTaskOrException()); PetitionInstance p4 = petitionService.getPetition(processInstance); PetitionInstance p5 = petitionService.getPetition(processInstance.getCurrentTaskOrException()); assertEquals(petitionInstance.getCod(), p2.getCod()); assertEquals(petitionInstance.getCod(), p3.getCod()); assertEquals(petitionInstance.getCod(), p4.getCod()); assertEquals(petitionInstance.getCod(), p5.getCod()); } @Test public void createPetitionWithoutSave() { PetitionInstance petition = petitionService.createNewPetitionWithoutSave(null, null, null); assertNull(petition.getCod()); } }
[Testes] - Aumentando cobertura do PetitionService.
server-libs/server-commons/src/test/java/org/opensingular/server/commons/service/PetitionServiceTest.java
[Testes] - Aumentando cobertura do PetitionService.
Java
apache-2.0
4546fee6f4d01e9fac487ae8d1802f0044584df4
0
alter-ego/androidbound,alter-ego/androidbound
package solutions.alterego.androidbound.binding;

import java.util.Locale;

import rx.Subscription;
import rx.functions.Action1;
import rx.schedulers.Schedulers;
import rx.subscriptions.Subscriptions;
import solutions.alterego.androidbound.NullLogger;
import solutions.alterego.androidbound.binding.data.BindingMode;
import solutions.alterego.androidbound.binding.data.BindingRequest;
import solutions.alterego.androidbound.binding.data.BindingSpecification;
import solutions.alterego.androidbound.binding.interfaces.IBinding;
import solutions.alterego.androidbound.binding.interfaces.IBindingAssociationEngine;
import solutions.alterego.androidbound.factories.IBindingFactory;
import solutions.alterego.androidbound.interfaces.ILogger;

/**
 * Connects a source binding (view model side) with a target binding (view side)
 * according to the {@link BindingMode} of the {@link BindingSpecification}.
 *
 * Depending on the mode it propagates value changes source->target, target->source,
 * or accumulates items (addValue) instead of replacing them. Rx subscriptions drive
 * the change propagation; {@link #dispose()} tears everything down.
 */
public class BindingAssociationEngine implements IBindingAssociationEngine {

    private BindingMode mMode;

    private Object mDataContext;

    private BindingSpecification mBindingSpecification;

    private IBinding mSourceBinding;

    private IBinding mTargetBinding;

    private Subscription mSourceSubscription;

    private Subscription mTargetSubscription;

    // Pre-initialized to an already-unsubscribed subscription so dispose()
    // can unsubscribe unconditionally even when no accumulate mode is active.
    private Subscription mSourceAccumulateSubscription = Subscriptions.unsubscribed();

    private Subscription mTargetAccumulateSubscription = Subscriptions.unsubscribed();

    private ILogger mLogger = NullLogger.instance;

    private IBindingFactory mSourceFactory;

    private IBindingFactory mTargetFactory;

    /**
     * Builds source/target bindings for the request and performs the initial value
     * propagation dictated by the binding mode.
     *
     * NOTE(review): createAccumulateSourceBinding/createAccumulateTargetBinding
     * reassign mSourceBinding/mTargetBinding, replacing the bindings created by
     * createSourceBinding/createTargetBinding just above — the earlier subscriptions
     * keep observing the first binding objects. Presumably intentional; verify.
     */
    public BindingAssociationEngine(BindingRequest request, IBindingFactory sourceFactory,
                                    IBindingFactory targetFactory, ILogger logger) {
        mMode = request.getSpecification().getMode();
        mSourceFactory = sourceFactory;
        mTargetFactory = targetFactory;
        mBindingSpecification = request.getSpecification();
        setLogger(logger);
        createTargetBinding(request.getTarget());
        createSourceBinding(request.getSource());
        createAccumulateSourceBinding(request.getSource());
        createAccumulateTargetBinding(request.getTarget());
        if (needsTargetUpdate()) {
            updateSourceFromTarget(mTargetBinding.getValue());
        }
        if (needsSourceUpdate()) {
            updateTargetFromSource(mSourceBinding.getValue());
        }
        if (needsTargetAccumulate()) {
            accumulateItems(mSourceBinding.getValue());
        }
        if (needsSourceAccumulate()) {
            accumulateItemsToSource(mTargetBinding.getValue());
        }
    }

    public Object getDataContext() {
        return mDataContext;
    }

    /**
     * Swaps the bound view model. Disposes the previous source binding and
     * unsubscribes both the plain and the accumulate source subscriptions before
     * recreating the source binding and re-running the initial propagation.
     */
    public void setDataContext(Object value) {
        if (mDataContext == value) {
            return;
        }
        mDataContext = value;
        if (mSourceBinding != null) {
            mSourceBinding.dispose();
        }
        if (mSourceSubscription != null) {
            mSourceSubscription.unsubscribe();
        }
        if (mSourceAccumulateSubscription != null) {
            mSourceAccumulateSubscription.unsubscribe();
        }
        createSourceBinding(value);
        if (needsSourceUpdate()) {
            updateTargetFromSource(mSourceBinding.getValue());
        }
        if (needsTargetAccumulate()) {
            accumulateItems(mSourceBinding.getValue());
        }
    }

    /**
     * Creates the source binding and, for modes that observe the source,
     * subscribes so every source change is pushed to the target.
     */
    private void createSourceBinding(Object source) {
        boolean needsSubs = needsSourceSubscription();
        mSourceBinding = mSourceFactory.create(source, mBindingSpecification.getPath(), needsSubs);
        if (needsSubs) {
            if (mSourceBinding.hasChanges()) {
                mSourceSubscription = mSourceBinding.getChanges()
                        .subscribeOn(Schedulers.computation())
                        .subscribe(new Action1<Object>() {
                            @Override
                            public void call(Object obj) {
                                updateTargetFromSource(obj);
                            }
                        });
            } else {
                mLogger.warning("Binding " + mBindingSpecification.getPath()
                        + " needs subscription, but changes were not available");
            }
        }
    }

    /**
     * Creates the target binding and, for modes that observe the target,
     * subscribes so every target change is pushed back to the source.
     */
    private void createTargetBinding(Object target) {
        boolean needsSubs = needsTargetSubscription();
        mTargetBinding = mTargetFactory.create(target, mBindingSpecification.getTarget(), needsSubs);
        if (needsSubs) {
            if (mTargetBinding.hasChanges()) {
                mTargetSubscription = mTargetBinding.getChanges()
                        .subscribeOn(Schedulers.computation())
                        .subscribe(new Action1<Object>() {
                            @Override
                            public void call(Object obj) {
                                updateSourceFromTarget(obj);
                            }
                        });
            } else {
                mLogger.warning("Binding " + mBindingSpecification.getTarget()
                        + " needs subscription, but changes were not available.");
            }
        }
    }

    /**
     * Recreates the source binding for accumulate modes (replaces mSourceBinding)
     * and subscribes so changes are accumulated into the target via addValue.
     */
    private void createAccumulateSourceBinding(Object source) {
        boolean needsSubs = needsTargetAccumulate();
        mSourceBinding = mSourceFactory.create(source, mBindingSpecification.getPath(), needsSubs);
        if (needsSubs) {
            if (mSourceBinding.hasChanges()) {
                mSourceAccumulateSubscription = mSourceBinding.getChanges()
                        .subscribeOn(Schedulers.computation())
                        .subscribe(obj -> {
                            accumulateItems(obj);
                        });
            } else {
                mLogger.warning("Binding " + mBindingSpecification.getPath()
                        + " needs subscription, but changes were not available");
            }
        }
    }

    /**
     * Recreates the target binding for accumulate-to-source modes (replaces
     * mTargetBinding) and subscribes so changes are accumulated into the source.
     */
    private void createAccumulateTargetBinding(Object target) {
        boolean needsSubs = needsSourceAccumulate();
        mTargetBinding = mTargetFactory.create(target, mBindingSpecification.getTarget(), needsSubs);
        if (needsSubs) {
            if (mTargetBinding.hasChanges()) {
                mTargetAccumulateSubscription = mTargetBinding.getChanges()
                        .subscribeOn(Schedulers.computation())
                        .subscribe(this::accumulateItemsToSource);
            } else {
                mLogger.warning("Binding " + mBindingSpecification.getTarget()
                        + " needs subscription, but changes were not available.");
            }
        }
    }

    // Modes that must watch the source for changes.
    private boolean needsSourceSubscription() {
        switch (mMode) {
            case Default:
            case OneWay:
            case TwoWay:
                return true;
            default:
                return false;
        }
    }

    // Modes that must watch the target for changes.
    private boolean needsTargetSubscription() {
        switch (mMode) {
            case Default:
            case OneWayToSource:
            case TwoWay:
                return true;
            default:
                return false;
        }
    }

    // Modes that push the source value to the target at construction time.
    private boolean needsSourceUpdate() {
        switch (mMode) {
            case Default:
            case OneWayOneTime:
            case OneWay:
            case TwoWay:
                return true;
            default:
                return false;
        }
    }

    // Modes that push the target value to the source at construction time.
    private boolean needsTargetUpdate() {
        switch (mMode) {
            case TwoWay:
            case OneWayToSource:
            case OneWayToSourceOneTime:
                return true;
            default:
                return false;
        }
    }

    public boolean needsTargetAccumulate() {
        return mMode == BindingMode.Accumulate || mMode == BindingMode.AccumulateTwoWay;
    }

    public boolean needsSourceAccumulate() {
        return mMode == BindingMode.AccumulateToSource || mMode == BindingMode.AccumulateTwoWay;
    }

    /**
     * Converts a source value via the specification's converter and sets it on the
     * target; falls back to the specification's fallback value when the source
     * reports "no value". Conversion errors are logged, not rethrown.
     */
    protected void updateTargetFromSource(Object obj) {
        Object result;
        try {
            if (obj != IBinding.noValue) {
                result = mBindingSpecification
                        .getValueConverter()
                        .convert(obj, mTargetBinding.getType(), mBindingSpecification.getConverterParameter(), Locale.getDefault());
            } else {
                mLogger.warning("Switching to fallback value for " + mBindingSpecification.getPath());
                result = mBindingSpecification.getFallbackValue();
            }
            mTargetBinding.setValue(result);
        } catch (Exception e) {
            mLogger.error("Error occurred while binding " + mBindingSpecification.getPath()
                    + " to target " + mBindingSpecification.getTarget() + ": " + e.getMessage());
        }
    }

    /**
     * Converts a target value back via the specification's converter and sets it
     * on the source. Conversion errors are logged, not rethrown.
     */
    protected void updateSourceFromTarget(Object obj) {
        try {
            Object result = mBindingSpecification
                    .getValueConverter()
                    .convertBack(obj, mSourceBinding.getType(), mBindingSpecification.getConverterParameter(), Locale.getDefault());
            mSourceBinding.setValue(result);
        } catch (Exception e) {
            mLogger.error("Error occurred while binding " + mBindingSpecification.getTarget()
                    + " to source " + mBindingSpecification.getPath() + ": " + e.getMessage());
        }
    }

    /**
     * Like updateTargetFromSource, but appends the converted value to the target
     * (addValue) instead of replacing it. Note the forward conversion here targets
     * mSourceBinding.getType().
     */
    private void accumulateItems(Object obj) {
        Object result;
        try {
            if (obj != IBinding.noValue) {
                result = mBindingSpecification
                        .getValueConverter()
                        .convert(obj, mSourceBinding.getType(), mBindingSpecification.getConverterParameter(), Locale.getDefault());
            } else {
                mLogger.warning("Switching to fallback value for " + mBindingSpecification.getPath());
                result = mBindingSpecification.getFallbackValue();
            }
            mTargetBinding.addValue(result);
        } catch (Exception e) {
            mLogger.error("Error occurred while binding " + mBindingSpecification.getPath()
                    + " to target " + mBindingSpecification.getTarget() + ": " + e.getMessage());
        }
    }

    /**
     * Converts a target value back and appends it to the source (addValue).
     * Conversion errors are logged, not rethrown.
     */
    private void accumulateItemsToSource(Object obj) {
        try {
            Object result = mBindingSpecification
                    .getValueConverter()
                    .convertBack(obj, mSourceBinding.getType(), mBindingSpecification.getConverterParameter(), Locale.getDefault());
            mSourceBinding.addValue(result);
        } catch (Exception e) {
            mLogger.error("Error occurred while binding " + mBindingSpecification.getTarget()
                    + " to source " + mBindingSpecification.getPath() + ": " + e.getMessage());
        }
    }

    public void setLogger(ILogger logger) {
        mLogger = logger.getLogger(this);
    }

    /** Unsubscribes all subscriptions and disposes both bindings. */
    public void dispose() {
        if (mSourceSubscription != null) {
            mSourceSubscription.unsubscribe();
        }
        if (mTargetSubscription != null) {
            mTargetSubscription.unsubscribe();
        }
        if (mSourceBinding != null) {
            mSourceBinding.dispose();
        }
        if (mTargetBinding != null) {
            mTargetBinding.dispose();
        }
        mTargetAccumulateSubscription.unsubscribe();
        mSourceAccumulateSubscription.unsubscribe();
    }
}
AndroidBound/src/main/java/solutions/alterego/androidbound/binding/BindingAssociationEngine.java
package solutions.alterego.androidbound.binding; import java.util.Locale; import rx.Subscription; import rx.functions.Action1; import rx.schedulers.Schedulers; import rx.subscriptions.Subscriptions; import solutions.alterego.androidbound.NullLogger; import solutions.alterego.androidbound.binding.data.BindingMode; import solutions.alterego.androidbound.binding.data.BindingRequest; import solutions.alterego.androidbound.binding.data.BindingSpecification; import solutions.alterego.androidbound.binding.interfaces.IBinding; import solutions.alterego.androidbound.binding.interfaces.IBindingAssociationEngine; import solutions.alterego.androidbound.factories.IBindingFactory; import solutions.alterego.androidbound.interfaces.ILogger; public class BindingAssociationEngine implements IBindingAssociationEngine { private BindingMode mMode; private Object mDataContext; private BindingSpecification mBindingSpecification; private IBinding mSourceBinding; private IBinding mTargetBinding; private Subscription mSourceSubscription; private Subscription mTargetSubscription; private Subscription mSourceAccumulateSubscription = Subscriptions.unsubscribed(); private Subscription mTargetAccumulateSubscription = Subscriptions.unsubscribed(); private ILogger mLogger = NullLogger.instance; private IBindingFactory mSourceFactory; private IBindingFactory mTargetFactory; public BindingAssociationEngine(BindingRequest request, IBindingFactory sourceFactory, IBindingFactory targetFactory, ILogger logger) { mMode = request.getSpecification().getMode(); mSourceFactory = sourceFactory; mTargetFactory = targetFactory; mBindingSpecification = request.getSpecification(); setLogger(logger); createTargetBinding(request.getTarget()); createSourceBinding(request.getSource()); createAccumulateSourceBinding(request.getSource()); createAccumulateTargetBinding(request.getTarget()); if (needsTargetUpdate()) { updateSourceFromTarget(mTargetBinding.getValue()); } if (needsSourceUpdate()) { 
updateTargetFromSource(mSourceBinding.getValue()); } if (needsTargetAccumulate()) { accumulateItems(mSourceBinding.getValue()); } if (needsSourceAccumulate()) { accumulateItemsToSource(mTargetBinding.getValue()); } } public Object getDataContext() { return mDataContext; } public void setDataContext(Object value) { if (mDataContext == value) { return; } mDataContext = value; if (mSourceBinding != null) { mSourceBinding.dispose(); } if (mSourceSubscription != null) { mSourceSubscription.unsubscribe(); } createSourceBinding(value); if (needsSourceUpdate()) { updateTargetFromSource(mSourceBinding.getValue()); } if (needsTargetAccumulate()) { accumulateItems(mSourceBinding.getValue()); } } private void createSourceBinding(Object source) { boolean needsSubs = needsSourceSubscription(); mSourceBinding = mSourceFactory.create(source, mBindingSpecification.getPath(), needsSubs); if (needsSubs) { if (mSourceBinding.hasChanges()) { mSourceSubscription = mSourceBinding.getChanges() .subscribeOn(Schedulers.computation()) .subscribe(new Action1<Object>() { @Override public void call(Object obj) { updateTargetFromSource(obj); } }); } else { mLogger.warning("Binding " + mBindingSpecification.getPath() + " needs subscription, but changes were not available"); } } } private void createTargetBinding(Object target) { boolean needsSubs = needsTargetSubscription(); mTargetBinding = mTargetFactory.create(target, mBindingSpecification.getTarget(), needsSubs); if (needsSubs) { if (mTargetBinding.hasChanges()) { mTargetSubscription = mTargetBinding.getChanges() .subscribeOn(Schedulers.computation()) .subscribe(new Action1<Object>() { @Override public void call(Object obj) { updateSourceFromTarget(obj); } }); } else { mLogger.warning("Binding " + mBindingSpecification.getTarget() + " needs subscription, but changes were not available."); } } } private void createAccumulateSourceBinding(Object source) { boolean needsSubs = needsTargetAccumulate(); mSourceBinding = 
mSourceFactory.create(source, mBindingSpecification.getPath(), needsSubs); if (needsSubs) { if (mSourceBinding.hasChanges()) { mSourceAccumulateSubscription = mSourceBinding.getChanges() .subscribeOn(Schedulers.computation()) .subscribe(obj -> { accumulateItems(obj); }); } else { mLogger.warning("Binding " + mBindingSpecification.getPath() + " needs subscription, but changes were not available"); } } } private void createAccumulateTargetBinding(Object target) { boolean needsSubs = needsSourceAccumulate(); mTargetBinding = mTargetFactory.create(target, mBindingSpecification.getTarget(), needsSubs); if (needsSubs) { if (mTargetBinding.hasChanges()) { mTargetAccumulateSubscription = mTargetBinding.getChanges() .subscribeOn(Schedulers.computation()) .subscribe(this::accumulateItemsToSource); } else { mLogger.warning("Binding " + mBindingSpecification.getTarget() + " needs subscription, but changes were not available."); } } } private boolean needsSourceSubscription() { switch (mMode) { case Default: case OneWay: case TwoWay: return true; default: return false; } } private boolean needsTargetSubscription() { switch (mMode) { case Default: case OneWayToSource: case TwoWay: return true; default: return false; } } private boolean needsSourceUpdate() { switch (mMode) { case Default: case OneWayOneTime: case OneWay: case TwoWay: return true; default: return false; } } private boolean needsTargetUpdate() { switch (mMode) { case TwoWay: case OneWayToSource: case OneWayToSourceOneTime: return true; default: return false; } } public boolean needsTargetAccumulate() { return mMode == BindingMode.Accumulate || mMode == BindingMode.AccumulateTwoWay; } public boolean needsSourceAccumulate() { return mMode == BindingMode.AccumulateToSource || mMode == BindingMode.AccumulateTwoWay; } protected void updateTargetFromSource(Object obj) { Object result; try { if (obj != IBinding.noValue) { result = mBindingSpecification .getValueConverter() .convert(obj, mTargetBinding.getType(), 
mBindingSpecification.getConverterParameter(), Locale.getDefault()); } else { mLogger.warning("Switching to fallback value for " + mBindingSpecification.getPath()); result = mBindingSpecification.getFallbackValue(); } mTargetBinding.setValue(result); } catch (Exception e) { mLogger.error("Error occurred while binding " + mBindingSpecification.getPath() + " to target " + mBindingSpecification.getTarget() + ": " + e.getMessage()); } } protected void updateSourceFromTarget(Object obj) { try { Object result = mBindingSpecification .getValueConverter() .convertBack(obj, mSourceBinding.getType(), mBindingSpecification.getConverterParameter(), Locale.getDefault()); mSourceBinding.setValue(result); } catch (Exception e) { mLogger.error("Error occurred while binding " + mBindingSpecification.getTarget() + " to source " + mBindingSpecification.getPath() + ": " + e.getMessage()); } } private void accumulateItems(Object obj) { Object result; try { if (obj != IBinding.noValue) { result = mBindingSpecification .getValueConverter() .convert(obj, mSourceBinding.getType(), mBindingSpecification.getConverterParameter(), Locale.getDefault()); } else { mLogger.warning("Switching to fallback value for " + mBindingSpecification.getPath()); result = mBindingSpecification.getFallbackValue(); } mTargetBinding.addValue(result); } catch (Exception e) { mLogger.error("Error occurred while binding " + mBindingSpecification.getPath() + " to target " + mBindingSpecification.getTarget() + ": " + e.getMessage()); } } private void accumulateItemsToSource(Object obj) { try { Object result = mBindingSpecification .getValueConverter() .convertBack(obj, mSourceBinding.getType(), mBindingSpecification.getConverterParameter(), Locale.getDefault()); mSourceBinding.addValue(result); } catch (Exception e) { mLogger.error("Error occurred while binding " + mBindingSpecification.getTarget() + " to source " + mBindingSpecification.getPath() + ": " + e.getMessage()); } } public void setLogger(ILogger logger) { 
mLogger = logger.getLogger(this); } public void dispose() { if (mSourceSubscription != null) { mSourceSubscription.unsubscribe(); } if (mTargetSubscription != null) { mTargetSubscription.unsubscribe(); } if (mSourceBinding != null) { mSourceBinding.dispose(); } if (mTargetBinding != null) { mTargetBinding.dispose(); } mTargetAccumulateSubscription.unsubscribe(); mSourceAccumulateSubscription.unsubscribe(); } }
Added missing unsubscribe mSourceAccumulateSubscription
AndroidBound/src/main/java/solutions/alterego/androidbound/binding/BindingAssociationEngine.java
Added missing unsubscribe mSourceAccumulateSubscription
Java
bsd-2-clause
4416cbd4d30cdf8b8d72ee76b8bc6b76a688ac6d
0
runelite/runelite,runelite/runelite,runelite/runelite
/* * Copyright (c) 2018, Psikoi <https://github.com/psikoi> * Copyright (c) 2018, Adam <[email protected]> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package net.runelite.client.plugins.loottracker; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.google.common.collect.HashMultiset; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.common.collect.Multiset; import com.google.common.collect.Multisets; import com.google.gson.Gson; import com.google.gson.JsonSyntaxException; import com.google.inject.Provides; import java.awt.image.BufferedImage; import java.time.Duration; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import javax.annotation.Nullable; import javax.inject.Inject; import javax.swing.SwingUtilities; import lombok.AccessLevel; import lombok.Getter; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import net.runelite.api.ChatMessageType; import net.runelite.api.Client; import net.runelite.api.GameState; import net.runelite.api.InventoryID; import net.runelite.api.ItemComposition; import net.runelite.api.ItemContainer; import net.runelite.api.ItemID; import net.runelite.api.MenuAction; import net.runelite.api.MessageNode; import net.runelite.api.NPC; import net.runelite.api.ObjectID; import net.runelite.api.Player; import net.runelite.api.Skill; import net.runelite.api.SpriteID; import net.runelite.api.WorldType; import net.runelite.api.coords.WorldPoint; import 
net.runelite.api.events.ChatMessage; import net.runelite.api.events.GameStateChanged; import net.runelite.api.events.ItemContainerChanged; import net.runelite.api.events.MenuOptionClicked; import net.runelite.api.events.WidgetLoaded; import net.runelite.api.widgets.WidgetID; import net.runelite.client.account.AccountSession; import net.runelite.client.account.SessionManager; import net.runelite.client.callback.ClientThread; import net.runelite.client.chat.ChatColorType; import net.runelite.client.chat.ChatMessageBuilder; import net.runelite.client.chat.ChatMessageManager; import net.runelite.client.chat.QueuedMessage; import net.runelite.client.config.ConfigManager; import net.runelite.client.eventbus.EventBus; import net.runelite.client.eventbus.Subscribe; import net.runelite.client.events.ClientShutdown; import net.runelite.client.events.ConfigChanged; import net.runelite.client.events.NpcLootReceived; import net.runelite.client.events.PlayerLootReceived; import net.runelite.client.events.RuneScapeProfileChanged; import net.runelite.client.events.SessionClose; import net.runelite.client.events.SessionOpen; import net.runelite.client.game.ItemManager; import net.runelite.client.game.ItemStack; import net.runelite.client.game.LootManager; import net.runelite.client.game.SpriteManager; import net.runelite.client.plugins.Plugin; import net.runelite.client.plugins.PluginDescriptor; import net.runelite.client.task.Schedule; import net.runelite.client.ui.ClientToolbar; import net.runelite.client.ui.NavigationButton; import net.runelite.client.util.ImageUtil; import net.runelite.client.util.QuantityFormatter; import net.runelite.client.util.Text; import net.runelite.http.api.loottracker.GameItem; import net.runelite.http.api.loottracker.LootAggregate; import net.runelite.http.api.loottracker.LootRecord; import net.runelite.http.api.loottracker.LootRecordType; import org.apache.commons.text.WordUtils; @PluginDescriptor( name = "Loot Tracker", description = "Tracks loot 
from monsters and minigames",
	tags = {"drops"}
)
@Slf4j
public class LootTrackerPlugin extends Plugin
{
	// Caps applied when loading stored loot at profile switch: at most MAX_DROPS drops,
	// and records older than MAX_AGE are pruned from the profile config
	private static final int MAX_DROPS = 1024;
	private static final Duration MAX_AGE = Duration.ofDays(365L);

	// Activity/Event loot handling
	private static final Pattern CLUE_SCROLL_PATTERN = Pattern.compile("You have completed [0-9]+ ([a-z]+) Treasure Trails?\\.");
	private static final int THEATRE_OF_BLOOD_REGION = 12867;
	private static final int THEATRE_OF_BLOOD_LOBBY = 14642;

	// Herbiboar loot handling
	@VisibleForTesting
	static final String HERBIBOAR_LOOTED_MESSAGE = "You harvest herbs from the herbiboar, whereupon it escapes.";
	private static final String HERBIBOAR_EVENT = "Herbiboar";
	private static final Pattern HERBIBOAR_HERB_SACK_PATTERN = Pattern.compile(".+(Grimy .+?) herb.+");

	// Seed Pack loot handling
	private static final String SEEDPACK_EVENT = "Seed pack";

	// Hespori loot handling
	private static final String HESPORI_LOOTED_MESSAGE = "You have successfully cleared this patch for new crops.";
	private static final String HESPORI_EVENT = "Hespori";
	private static final int HESPORI_REGION = 5021;

	// Chest loot handling
	private static final String CHEST_LOOTED_MESSAGE = "You find some treasure in the chest!";
	private static final Pattern ROGUES_CHEST_PATTERN = Pattern.compile("You find (a|some)([a-z\\s]*) inside.");
	private static final Pattern LARRAN_LOOTED_PATTERN = Pattern.compile("You have opened Larran's (big|small) chest .*");
	// Used by Stone Chest, Isle of Souls chest, Dark Chest
	private static final String OTHER_CHEST_LOOTED_MESSAGE = "You steal some loot from the chest.";
	private static final String DORGESH_KAAN_CHEST_LOOTED_MESSAGE = "You find treasure inside!";
	private static final String GRUBBY_CHEST_LOOTED_MESSAGE = "You have opened the Grubby Chest";
	private static final Pattern HAM_CHEST_LOOTED_PATTERN = Pattern.compile("Your (?<key>[a-z]+) key breaks in the lock.*");
	private static final int HAM_STOREROOM_REGION = 10321;
	// Map region id -> chest event name; used to attribute a chest-looted chat message
	// to the chest the player is standing at
	private static final Map<Integer, String> CHEST_EVENT_TYPES = new ImmutableMap.Builder<Integer, String>().
		put(5179, "Brimstone Chest").
		put(11573, "Crystal Chest").
		put(12093, "Larran's big chest").
		put(12127, "The Gauntlet").
		put(13113, "Larran's small chest").
		put(13151, "Elven Crystal Chest").
		put(5277, "Stone chest").
		put(10835, "Dorgesh-Kaan Chest").
		put(10834, "Dorgesh-Kaan Chest").
		put(7323, "Grubby Chest").
		put(8593, "Isle of Souls Chest").
		put(7827, "Dark Chest").
		put(13117, "Rogues' Chest").
		build();

	// Shade chest loot handling
	private static final Pattern SHADE_CHEST_NO_KEY_PATTERN = Pattern.compile("You need a [a-z]+ key with a [a-z]+ trim to open this chest .*");
	// Map chest object id -> event name for the shade key chests
	private static final Map<Integer, String> SHADE_CHEST_OBJECTS = new ImmutableMap.Builder<Integer, String>().
		put(ObjectID.BRONZE_CHEST, "Bronze key red").
		put(ObjectID.BRONZE_CHEST_4112, "Bronze key brown").
		put(ObjectID.BRONZE_CHEST_4113, "Bronze key crimson").
		put(ObjectID.BRONZE_CHEST_4114, "Bronze key black").
		put(ObjectID.BRONZE_CHEST_4115, "Bronze key purple").
		put(ObjectID.STEEL_CHEST, "Steel key red").
		put(ObjectID.STEEL_CHEST_4117, "Steel key brown").
		put(ObjectID.STEEL_CHEST_4118, "Steel key crimson").
		put(ObjectID.STEEL_CHEST_4119, "Steel key black").
		put(ObjectID.STEEL_CHEST_4120, "Steel key purple").
		put(ObjectID.BLACK_CHEST, "Black key red").
		put(ObjectID.BLACK_CHEST_4122, "Black key brown").
		put(ObjectID.BLACK_CHEST_4123, "Black key crimson").
		put(ObjectID.BLACK_CHEST_4124, "Black key black").
		put(ObjectID.BLACK_CHEST_4125, "Black key purple").
		put(ObjectID.SILVER_CHEST, "Silver key red").
		put(ObjectID.SILVER_CHEST_4127, "Silver key brown").
		put(ObjectID.SILVER_CHEST_4128, "Silver key crimson").
		put(ObjectID.SILVER_CHEST_4129, "Silver key black").
		put(ObjectID.SILVER_CHEST_4130, "Silver key purple").
		put(ObjectID.GOLD_CHEST, "Gold key red").
		put(ObjectID.GOLD_CHEST_41213, "Gold key brown").
		put(ObjectID.GOLD_CHEST_41214, "Gold key crimson").
		put(ObjectID.GOLD_CHEST_41215, "Gold key black").
		put(ObjectID.GOLD_CHEST_41216, "Gold key purple").
		build();

	// Hallow Sepulchre Coffin handling
	private static final String COFFIN_LOOTED_MESSAGE = "You push the coffin lid aside.";
	private static final String HALLOWED_SEPULCHRE_COFFIN_EVENT = "Coffin (Hallowed Sepulchre)";
	private static final Set<Integer> HALLOWED_SEPULCHRE_MAP_REGIONS = ImmutableSet.of(8797, 10077, 9308, 10074, 9050); // one map region per floor
	private static final String HALLOWED_SACK_EVENT = "Hallowed Sack";

	// Last man standing map regions
	private static final Set<Integer> LAST_MAN_STANDING_REGIONS = ImmutableSet.of(13658, 13659, 13660, 13914, 13915, 13916, 13918, 13919, 13920, 14174, 14175, 14176, 14430, 14431, 14432);

	private static final Pattern PICKPOCKET_REGEX = Pattern.compile("You pick (the )?(?<target>.+)'s? pocket.*");

	private static final String BIRDNEST_EVENT = "Bird nest";
	private static final Set<Integer> BIRDNEST_IDS = ImmutableSet.of(ItemID.BIRD_NEST, ItemID.BIRD_NEST_5071, ItemID.BIRD_NEST_5072, ItemID.BIRD_NEST_5073, ItemID.BIRD_NEST_5074, ItemID.BIRD_NEST_7413, ItemID.BIRD_NEST_13653, ItemID.BIRD_NEST_22798, ItemID.BIRD_NEST_22800);

	// Birdhouses
	private static final Pattern BIRDHOUSE_PATTERN = Pattern.compile("You dismantle and discard the trap, retrieving (?:(?:a|\\d{1,2}) nests?, )?10 dead birds, \\d{1,3} feathers and (\\d,?\\d{1,3}) Hunter XP\\.");
	// The dismantle chat message only identifies the birdhouse tier by its Hunter XP reward
	private static final Map<Integer, String> BIRDHOUSE_XP_TO_TYPE = new ImmutableMap.Builder<Integer, String>().
		put(280, "Regular Bird House").
		put(420, "Oak Bird House").
		put(560, "Willow Bird House").
		put(700, "Teak Bird House").
		put(820, "Maple Bird House").
		put(960, "Mahogany Bird House").
		put(1020, "Yew Bird House").
		put(1140, "Magic Bird House").
		put(1200, "Redwood Bird House").
		build();

	/*
	 * This map is used when a pickpocket target has a different name in the chat message than their in-game name.
	 * Note that if the two NPCs can be found in the same place, there is a chance of race conditions
	 * occurring when changing targets mid-pickpocket, in which case a different solution may need to be considered.
	 */
	private static final Multimap<String, String> PICKPOCKET_DISAMBIGUATION_MAP = ImmutableMultimap.of(
		"H.A.M. Member", "Man",
		"H.A.M. Member", "Woman"
	);

	private static final String CASKET_EVENT = "Casket";

	private static final String WINTERTODT_SUPPLY_CRATE_EVENT = "Supply crate (Wintertodt)";

	// Soul Wars
	private static final String SPOILS_OF_WAR_EVENT = "Spoils of war";
	private static final Set<Integer> SOUL_WARS_REGIONS = ImmutableSet.of(8493, 8749, 9005);

	// Tempoross
	private static final String TEMPOROSS_EVENT = "Reward pool (Tempoross)";
	private static final String TEMPOROSS_CASKET_EVENT = "Casket (Tempoross)";
	private static final String TEMPOROSS_LOOT_STRING = "You found some loot: ";
	private static final int TEMPOROSS_REGION = 12588;

	// Guardians of the Rift
	private static final String GUARDIANS_OF_THE_RIFT_EVENT = "Guardians of the Rift";
	private static final String INTRICATE_POUCH_EVENT = "Intricate pouch";
	private static final String GUARDIANS_OF_THE_RIFT_LOOT_STRING = "You found some loot: ";
	private static final int GUARDIANS_OF_THE_RIFT_REGION = 14484;

	// Mahogany Homes
	private static final String MAHOGANY_CRATE_EVENT = "Supply crate (Mahogany Homes)";

	// Implings
	private static final Set<Integer> IMPLING_JARS = ImmutableSet.of(
		ItemID.BABY_IMPLING_JAR,
		ItemID.YOUNG_IMPLING_JAR,
		ItemID.GOURMET_IMPLING_JAR,
		ItemID.EARTH_IMPLING_JAR,
		ItemID.ESSENCE_IMPLING_JAR,
		ItemID.ECLECTIC_IMPLING_JAR,
		ItemID.NATURE_IMPLING_JAR,
		ItemID.MAGPIE_IMPLING_JAR,
		ItemID.NINJA_IMPLING_JAR,
		ItemID.CRYSTAL_IMPLING_JAR,
		ItemID.DRAGON_IMPLING_JAR,
		ItemID.LUCKY_IMPLING_JAR
	);
	private static final String IMPLING_CATCH_MESSAGE = "You manage to catch the impling and acquire some loot.";

	// Raids
	private static final String CHAMBERS_OF_XERIC = "Chambers of Xeric";
	private static final String THEATRE_OF_BLOOD = "Theatre of Blood";
	private static final String TOMBS_OF_AMASCUT = "Tombs of Amascut";

	// Used to pick "a"/"an" for the npc kill chat message
	private static final Set<Character> VOWELS = ImmutableSet.of('a', 'e', 'i', 'o', 'u');

	@Inject
	private ClientToolbar clientToolbar;

	@Inject
	private ItemManager itemManager;

	@Inject
	private SpriteManager spriteManager;

	@Inject
	private LootTrackerConfig config;

	@Inject
	private Client client;

	@Inject
	private ClientThread clientThread;

	@Inject
	private SessionManager sessionManager;

	@Inject
	private ScheduledExecutorService executor;

	@Inject
	private EventBus eventBus;

	@Inject
	private ChatMessageManager chatMessageManager;

	@Inject
	private LootManager lootManager;

	@Inject
	private ConfigManager configManager;

	@Inject
	private Gson gson;

	@Getter(AccessLevel.PACKAGE)
	@Inject
	private LootTrackerClient lootTrackerClient;

	private LootTrackerPanel panel;
	private NavigationButton navButton;

	// Set when a reward chest's loot has been recorded, to avoid double-counting it
	private boolean chestLooted;
	// Last NPC right-clicked with Pickpocket; fallback for ambiguous pickpocket chat messages
	private String lastPickpocketTarget;

	private List<String> ignoredItems = new ArrayList<>();
	private List<String> ignoredEvents = new ArrayList<>();

	// Pending inventory-diff capture: which container to watch, its pre-change snapshot,
	// and the callback to run once the container changes (see onInvChange / onItemContainerChanged)
	private InventoryID inventoryId;
	private Multiset<Integer> inventorySnapshot;
	private InvChangeCallback inventorySnapshotCb;

	// Loot records not yet flushed to config / the loot tracker service; synchronized on itself
	private final List<LootRecord> queuedLoots = new ArrayList<>();
	private String profileKey;

	/**
	 * Merge duplicate item ids in the given collection into single stacks with summed quantities.
	 */
	private static Collection<ItemStack> stack(Collection<ItemStack> items)
	{
		final List<ItemStack> list = new ArrayList<>();

		for (final ItemStack item : items)
		{
			int quantity = 0;
			for (final ItemStack i : list)
			{
				if (i.getId() == item.getId())
				{
					quantity = i.getQuantity();
					list.remove(i);
					break;
				}
			}
			if (quantity > 0)
			{
				list.add(new ItemStack(item.getId(), item.getQuantity() + quantity, item.getLocation()));
			}
			else
			{
				list.add(item);
			}
		}

		return list;
	}

	@Provides
	LootTrackerConfig provideConfig(ConfigManager configManager)
	{
		return configManager.getConfig(LootTrackerConfig.class);
	}

	@Subscribe
	public void onSessionOpen(SessionOpen sessionOpen)
	{
		AccountSession accountSession =
			sessionManager.getAccountSession();
		if (accountSession.getUuid() != null)
		{
			lootTrackerClient.setUuid(accountSession.getUuid());
		}
		else
		{
			lootTrackerClient.setUuid(null);
		}
	}

	@Subscribe
	public void onSessionClose(SessionClose sessionClose)
	{
		// Flush queued loot under the old session before dropping the uuid
		submitLoot();
		lootTrackerClient.setUuid(null);
	}

	@Subscribe
	public void onRuneScapeProfileChanged(RuneScapeProfileChanged e)
	{
		final String profileKey = configManager.getRSProfileKey();
		if (profileKey == null)
		{
			return;
		}

		if (profileKey.equals(this.profileKey))
		{
			return;
		}

		switchProfile(profileKey);
	}

	/**
	 * Switch to a new RuneScape profile: flush queued loot for the previous profile, then
	 * (if panel syncing is enabled) load this profile's stored loot into the side panel,
	 * pruning records older than MAX_AGE and keeping at most MAX_DROPS drops (most recent first).
	 */
	private void switchProfile(String profileKey)
	{
		executor.execute(() ->
		{
			// Current queued loot is for the previous profile, so save it first with the current profile key
			submitLoot();

			this.profileKey = profileKey;

			log.debug("Switched to profile {}", profileKey);

			if (!config.syncPanel())
			{
				return;
			}

			int drops = 0;
			List<ConfigLoot> loots = new ArrayList<>();
			Instant old = Instant.now().minus(MAX_AGE);
			for (String key : configManager.getRSProfileConfigurationKeys(LootTrackerConfig.GROUP, profileKey, "drops_"))
			{
				String json = configManager.getConfiguration(LootTrackerConfig.GROUP, profileKey, key);
				ConfigLoot configLoot;
				try
				{
					configLoot = gson.fromJson(json, ConfigLoot.class);
				}
				catch (JsonSyntaxException ex)
				{
					log.warn("Removing loot with malformed json: {}", json, ex);
					configManager.unsetConfiguration(LootTrackerConfig.GROUP, profileKey, key);
					continue;
				}

				if (configLoot.last.isBefore(old))
				{
					log.debug("Removing old loot for {} {}", configLoot.type, configLoot.name);
					configManager.unsetConfiguration(LootTrackerConfig.GROUP, profileKey, key);
					continue;
				}

				// Already at capacity and this record is older than the oldest kept one
				if (drops >= MAX_DROPS && !loots.isEmpty() && loots.get(0).last.isAfter(configLoot.last))
				{
					// fast drop
					continue;
				}

				// Keep loots sorted oldest-first so the oldest record can be evicted cheaply
				sortedInsert(loots, configLoot, Comparator.comparing(ConfigLoot::getLast));
				drops += configLoot.numDrops();

				if (drops >= MAX_DROPS)
				{
					ConfigLoot top = loots.remove(0);
					drops -= top.numDrops();
				}
			}

			log.debug("Loaded {} records", loots.size());

			clientThread.invokeLater(() ->
			{
				// convertToLootTrackerRecord requires item compositions to be available to get the item name,
				// so it can't be run while the client is starting
				if (client.getGameState().getState() < GameState.LOGIN_SCREEN.getState())
				{
					return false;
				}

				// convertToLootTrackerRecord must be called on client thread
				List<LootTrackerRecord> records = loots.stream()
					.map(this::convertToLootTrackerRecord)
					.collect(Collectors.toList());
				SwingUtilities.invokeLater(() ->
				{
					panel.clearRecords();
					panel.addRecords(records);
				});

				return true;
			});
		});
	}

	// Insert value into list, keeping list sorted per the comparator
	private static <T> void sortedInsert(List<T> list, T value, Comparator<? super T> c)
	{
		int idx = Collections.binarySearch(list, value, c);
		list.add(idx < 0 ? -idx - 1 : idx, value);
	}

	@Subscribe
	public void onConfigChanged(ConfigChanged event)
	{
		if (event.getGroup().equals(LootTrackerConfig.GROUP))
		{
			if ("ignoredItems".equals(event.getKey()) || "ignoredEvents".equals(event.getKey()))
			{
				ignoredItems = Text.fromCSV(config.getIgnoredItems());
				ignoredEvents = Text.fromCSV(config.getIgnoredEvents());
				SwingUtilities.invokeLater(panel::updateIgnoredRecords);
			}
		}
	}

	@Override
	protected void startUp() throws Exception
	{
		profileKey = null;
		ignoredItems = Text.fromCSV(config.getIgnoredItems());
		ignoredEvents = Text.fromCSV(config.getIgnoredEvents());
		panel = new LootTrackerPanel(this, itemManager, config);
		spriteManager.getSpriteAsync(SpriteID.TAB_INVENTORY, 0, panel::loadHeaderIcon);

		final BufferedImage icon = ImageUtil.loadImageResource(getClass(), "panel_icon.png");

		navButton = NavigationButton.builder()
			.tooltip("Loot Tracker")
			.icon(icon)
			.priority(5)
			.panel(panel)
			.build();

		clientToolbar.addNavigation(navButton);

		AccountSession accountSession = sessionManager.getAccountSession();
		if (accountSession != null)
		{
			lootTrackerClient.setUuid(accountSession.getUuid());
		}

		String profileKey = configManager.getRSProfileKey();
		if (profileKey != null)
		{
			switchProfile(profileKey);
		}
	}

	@Override
	protected void shutDown()
	{
submitLoot(); clientToolbar.removeNavigation(navButton); lootTrackerClient.setUuid(null); chestLooted = false; } @Subscribe public void onClientShutdown(ClientShutdown event) { Future<Void> future = submitLoot(); if (future != null) { event.waitFor(future); } } @Subscribe public void onGameStateChanged(final GameStateChanged event) { if (event.getGameState() == GameState.LOADING && !client.isInInstancedRegion()) { chestLooted = false; } } void addLoot(@NonNull String name, int combatLevel, LootRecordType type, Object metadata, Collection<ItemStack> items) { addLoot(name, combatLevel, type, metadata, items, 1); } void addLoot(@NonNull String name, int combatLevel, LootRecordType type, Object metadata, Collection<ItemStack> items, int amount) { final LootTrackerItem[] entries = buildEntries(stack(items)); SwingUtilities.invokeLater(() -> panel.add(name, type, combatLevel, entries, amount)); LootRecord lootRecord = new LootRecord(name, type, metadata, toGameItems(items), Instant.now(), getLootWorldId()); synchronized (queuedLoots) { queuedLoots.add(lootRecord); } eventBus.post(new LootReceived(name, combatLevel, type, items, amount)); } private Integer getLootWorldId() { // For the wiki to determine drop rates based on dmm brackets / identify leagues drops return client.getWorldType().contains(WorldType.SEASONAL) ? client.getWorld() : null; } @Subscribe public void onNpcLootReceived(final NpcLootReceived npcLootReceived) { final NPC npc = npcLootReceived.getNpc(); final Collection<ItemStack> items = npcLootReceived.getItems(); final String name = npc.getName(); final int combat = npc.getCombatLevel(); addLoot(name, combat, LootRecordType.NPC, npc.getId(), items); if (config.npcKillChatMessage()) { final String prefix = VOWELS.contains(Character.toLowerCase(name.charAt(0))) ? 
"an" : "a"; lootReceivedChatMessage(items, prefix + ' ' + name); } } @Subscribe public void onPlayerLootReceived(final PlayerLootReceived playerLootReceived) { // Ignore Last Man Standing and Soul Wars player loots if (isPlayerWithinMapRegion(LAST_MAN_STANDING_REGIONS) || isPlayerWithinMapRegion(SOUL_WARS_REGIONS)) { return; } final Player player = playerLootReceived.getPlayer(); final Collection<ItemStack> items = playerLootReceived.getItems(); final String name = player.getName(); final int combat = player.getCombatLevel(); addLoot(name, combat, LootRecordType.PLAYER, null, items); if (config.pvpKillChatMessage()) { lootReceivedChatMessage(items, name); } } @Subscribe public void onWidgetLoaded(WidgetLoaded widgetLoaded) { String event; Object metadata = null; final ItemContainer container; switch (widgetLoaded.getGroupId()) { case (WidgetID.BARROWS_REWARD_GROUP_ID): event = "Barrows"; container = client.getItemContainer(InventoryID.BARROWS_REWARD); break; case (WidgetID.CHAMBERS_OF_XERIC_REWARD_GROUP_ID): if (chestLooted) { return; } event = CHAMBERS_OF_XERIC; container = client.getItemContainer(InventoryID.CHAMBERS_OF_XERIC_CHEST); chestLooted = true; break; case (WidgetID.THEATRE_OF_BLOOD_GROUP_ID): if (chestLooted) { return; } int region = WorldPoint.fromLocalInstance(client, client.getLocalPlayer().getLocalLocation()).getRegionID(); if (region != THEATRE_OF_BLOOD_REGION && region != THEATRE_OF_BLOOD_LOBBY) { return; } event = THEATRE_OF_BLOOD; container = client.getItemContainer(InventoryID.THEATRE_OF_BLOOD_CHEST); chestLooted = true; break; case WidgetID.TOA_REWARD_GROUP_ID: if (chestLooted) { return; } event = TOMBS_OF_AMASCUT; container = client.getItemContainer(InventoryID.TOA_REWARD_CHEST); chestLooted = true; break; case (WidgetID.KINGDOM_GROUP_ID): event = "Kingdom of Miscellania"; container = client.getItemContainer(InventoryID.KINGDOM_OF_MISCELLANIA); break; case (WidgetID.FISHING_TRAWLER_REWARD_GROUP_ID): event = "Fishing Trawler"; metadata = 
client.getBoostedSkillLevel(Skill.FISHING); container = client.getItemContainer(InventoryID.FISHING_TRAWLER_REWARD); break; case (WidgetID.DRIFT_NET_FISHING_REWARD_GROUP_ID): event = "Drift Net"; metadata = client.getBoostedSkillLevel(Skill.FISHING); container = client.getItemContainer(InventoryID.DRIFT_NET_FISHING_REWARD); break; case WidgetID.WILDERNESS_LOOT_CHEST: if (chestLooted) { return; } event = "Loot Chest"; container = client.getItemContainer(InventoryID.WILDERNESS_LOOT_CHEST); chestLooted = true; break; default: return; } if (container == null) { return; } // Convert container items to array of ItemStack final Collection<ItemStack> items = Arrays.stream(container.getItems()) .filter(item -> item.getId() > 0) .map(item -> new ItemStack(item.getId(), item.getQuantity(), client.getLocalPlayer().getLocalLocation())) .collect(Collectors.toList()); if (config.showRaidsLootValue() && (event.equals(THEATRE_OF_BLOOD) || event.equals(CHAMBERS_OF_XERIC)) || event.equals(TOMBS_OF_AMASCUT)) { long totalValue = items.stream() .filter(item -> item.getId() > -1) .mapToLong(item -> config.priceType() == LootTrackerPriceType.GRAND_EXCHANGE ? 
(long) itemManager.getItemPrice(item.getId()) * item.getQuantity() : (long) itemManager.getItemComposition(item.getId()).getHaPrice() * item.getQuantity()) .sum(); String chatMessage = new ChatMessageBuilder() .append(ChatColorType.NORMAL) .append("Your loot is worth around ") .append(ChatColorType.HIGHLIGHT) .append(QuantityFormatter.formatNumber(totalValue)) .append(ChatColorType.NORMAL) .append(" coins.") .build(); chatMessageManager.queue(QueuedMessage.builder() .type(ChatMessageType.FRIENDSCHATNOTIFICATION) .runeLiteFormattedMessage(chatMessage) .build()); } if (items.isEmpty()) { log.debug("No items to find for Event: {} | Container: {}", event, container); return; } addLoot(event, -1, LootRecordType.EVENT, metadata, items); } @Subscribe public void onChatMessage(ChatMessage event) { if (event.getType() != ChatMessageType.GAMEMESSAGE && event.getType() != ChatMessageType.SPAM) { return; } final String message = event.getMessage(); if (message.equals(CHEST_LOOTED_MESSAGE) || message.equals(OTHER_CHEST_LOOTED_MESSAGE) || message.equals(DORGESH_KAAN_CHEST_LOOTED_MESSAGE) || message.startsWith(GRUBBY_CHEST_LOOTED_MESSAGE) || LARRAN_LOOTED_PATTERN.matcher(message).matches() || ROGUES_CHEST_PATTERN.matcher(message).matches()) { final int regionID = client.getLocalPlayer().getWorldLocation().getRegionID(); if (!CHEST_EVENT_TYPES.containsKey(regionID)) { return; } onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, CHEST_EVENT_TYPES.get(regionID))); return; } if (message.equals(COFFIN_LOOTED_MESSAGE) && isPlayerWithinMapRegion(HALLOWED_SEPULCHRE_MAP_REGIONS)) { onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HALLOWED_SEPULCHRE_COFFIN_EVENT)); return; } if (message.equals(HERBIBOAR_LOOTED_MESSAGE)) { if (processHerbiboarHerbSackLoot(event.getTimestamp())) { return; } onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HERBIBOAR_EVENT, client.getBoostedSkillLevel(Skill.HERBLORE))); return; } final int regionID = 
			client.getLocalPlayer().getWorldLocation().getRegionID();
		if (HESPORI_REGION == regionID && message.equals(HESPORI_LOOTED_MESSAGE))
		{
			onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HESPORI_EVENT));
			return;
		}

		final Matcher hamStoreroomMatcher = HAM_CHEST_LOOTED_PATTERN.matcher(message);
		if (hamStoreroomMatcher.matches() && regionID == HAM_STOREROOM_REGION)
		{
			String keyType = hamStoreroomMatcher.group("key");
			onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, String.format("H.A.M. chest (%s)", keyType)));
			return;
		}

		final Matcher pickpocketMatcher = PICKPOCKET_REGEX.matcher(message);
		if (pickpocketMatcher.matches())
		{
			// Get the target's name as listed in the chat box
			String pickpocketTarget = WordUtils.capitalize(pickpocketMatcher.group("target"));

			// Occasional edge case where the pickpocket message doesn't list the correct name of the NPC (e.g. H.A.M. Members)
			if (PICKPOCKET_DISAMBIGUATION_MAP.get(lastPickpocketTarget).contains(pickpocketTarget))
			{
				pickpocketTarget = lastPickpocketTarget;
			}

			onInvChange(collectInvAndGroundItems(LootRecordType.PICKPOCKET, pickpocketTarget));
			return;
		}

		// Check if message is for a clue scroll reward
		final Matcher m = CLUE_SCROLL_PATTERN.matcher(Text.removeTags(message));
		if (m.find())
		{
			final String type = m.group(1).toLowerCase();
			String eventType;
			switch (type)
			{
				case "beginner":
					eventType = "Clue Scroll (Beginner)";
					break;
				case "easy":
					eventType = "Clue Scroll (Easy)";
					break;
				case "medium":
					eventType = "Clue Scroll (Medium)";
					break;
				case "hard":
					eventType = "Clue Scroll (Hard)";
					break;
				case "elite":
					eventType = "Clue Scroll (Elite)";
					break;
				case "master":
					eventType = "Clue Scroll (Master)";
					break;
				default:
					log.debug("Unrecognized clue type: {}", type);
					return;
			}

			// Clue Scrolls use same InventoryID as Barrows
			onInvChange(InventoryID.BARROWS_REWARD, collectInvItems(LootRecordType.EVENT, eventType));
			return;
		}

		if (SHADE_CHEST_NO_KEY_PATTERN.matcher(message).matches())
		{
			// Player didn't have the key they needed.
			resetEvent();
			return;
		}

		// Check if message is a birdhouse type
		final Matcher matcher = BIRDHOUSE_PATTERN.matcher(message);
		if (matcher.matches())
		{
			// The birdhouse tier is identified by the Hunter XP amount in the message
			final int xp = Integer.parseInt(matcher.group(1));
			final String type = BIRDHOUSE_XP_TO_TYPE.get(xp);
			if (type == null)
			{
				log.debug("Unknown bird house type {}", xp);
				return;
			}

			onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, type, client.getBoostedSkillLevel(Skill.HUNTER)));
			return;
		}

		if (regionID == TEMPOROSS_REGION && message.startsWith(TEMPOROSS_LOOT_STRING))
		{
			onInvChange(collectInvItems(LootRecordType.EVENT, TEMPOROSS_EVENT, client.getBoostedSkillLevel(Skill.FISHING)));
			return;
		}

		if (regionID == GUARDIANS_OF_THE_RIFT_REGION && message.startsWith(GUARDIANS_OF_THE_RIFT_LOOT_STRING))
		{
			onInvChange(collectInvItems(LootRecordType.EVENT, GUARDIANS_OF_THE_RIFT_EVENT, client.getBoostedSkillLevel(Skill.RUNECRAFT)));
			return;
		}

		if (message.equals(IMPLING_CATCH_MESSAGE))
		{
			// Use the impling NPC currently being interacted with as the event name
			onInvChange(collectInvItems(LootRecordType.EVENT, client.getLocalPlayer().getInteracting().getName()));
			return;
		}
	}

	/**
	 * Completes a pending inventory-diff capture: when the watched container changes,
	 * diff it against the snapshot taken in onInvChange and hand added items, nearby
	 * ground items, and removed items to the armed callback.
	 */
	@Subscribe
	public void onItemContainerChanged(ItemContainerChanged event)
	{
		// when the wilderness chest empties, clear chest loot flag for the next key
		if (event.getContainerId() == InventoryID.WILDERNESS_LOOT_CHEST.getId()
			&& Arrays.stream(event.getItemContainer().getItems()).noneMatch(i -> i.getId() > -1))
		{
			log.debug("Resetting chest loot flag");
			chestLooted = false;
		}

		if (inventoryId == null || event.getContainerId() != inventoryId.getId())
		{
			return;
		}

		final ItemContainer inventoryContainer = event.getItemContainer();
		Multiset<Integer> currentInventory = HashMultiset.create();
		Arrays.stream(inventoryContainer.getItems())
			.forEach(item -> currentInventory.add(item.getId(), item.getQuantity()));

		WorldPoint playerLocation = client.getLocalPlayer().getWorldLocation();
		final Collection<ItemStack> groundItems = lootManager.getItemSpawns(playerLocation);

		// diff = items gained, diffr = items lost since the snapshot
		final Multiset<Integer> diff = Multisets.difference(currentInventory,
			inventorySnapshot);
		final Multiset<Integer> diffr = Multisets.difference(inventorySnapshot, currentInventory);

		final List<ItemStack> items = diff.entrySet().stream()
			.map(e -> new ItemStack(e.getElement(), e.getCount(), client.getLocalPlayer().getLocalLocation()))
			.collect(Collectors.toList());

		log.debug("Inv change: {} Ground items: {}", items, groundItems);

		if (inventorySnapshotCb != null)
		{
			inventorySnapshotCb.accept(items, groundItems, diffr);
		}

		// Capture is one-shot; clear it after firing
		inventoryId = null;
		inventorySnapshot = null;
		inventorySnapshotCb = null;
	}

	/**
	 * Arms inventory-diff captures for loot sources triggered by menu clicks:
	 * shade chests, seed packs, bird nests, openable caskets/crates/pouches, and impling jars.
	 */
	@Subscribe
	public void onMenuOptionClicked(MenuOptionClicked event)
	{
		// There are some pickpocket targets who show up in the chat box with a different name (e.g. H.A.M. members -> man/woman)
		// We use the value selected from the right-click menu as a fallback for the event lookup in those cases.
		if (isNPCOp(event.getMenuAction()) && event.getMenuOption().equals("Pickpocket"))
		{
			lastPickpocketTarget = Text.removeTags(event.getMenuTarget());
		}
		else if (isObjectOp(event.getMenuAction()) && event.getMenuOption().equals("Open") && SHADE_CHEST_OBJECTS.containsKey(event.getId()))
		{
			onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, SHADE_CHEST_OBJECTS.get(event.getId())));
		}
		else if (event.isItemOp())
		{
			if (event.getItemId() == ItemID.SEED_PACK && (event.getMenuOption().equals("Take") || event.getMenuOption().equals("Take-all")))
			{
				onInvChange(collectInvItems(LootRecordType.EVENT, SEEDPACK_EVENT));
			}
			else if (event.getMenuOption().equals("Search") && BIRDNEST_IDS.contains(event.getItemId()))
			{
				onInvChange(collectInvItems(LootRecordType.EVENT, BIRDNEST_EVENT));
			}
			else if (event.getMenuOption().equals("Open"))
			{
				switch (event.getItemId())
				{
					case ItemID.CASKET:
						onInvChange(collectInvItems(LootRecordType.EVENT, CASKET_EVENT));
						break;
					case ItemID.SUPPLY_CRATE:
					case ItemID.EXTRA_SUPPLY_CRATE:
						onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, WINTERTODT_SUPPLY_CRATE_EVENT));
						break;
					case ItemID.SPOILS_OF_WAR:
						onInvChange(collectInvItems(LootRecordType.EVENT, SPOILS_OF_WAR_EVENT));
						break;
					case ItemID.CASKET_25590:
						onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, TEMPOROSS_CASKET_EVENT));
						break;
					case ItemID.INTRICATE_POUCH:
						onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, INTRICATE_POUCH_EVENT));
						break;
					case ItemID.SIMPLE_LOCKBOX_25647:
					case ItemID.ELABORATE_LOCKBOX_25649:
					case ItemID.ORNATE_LOCKBOX_25651:
					case ItemID.CACHE_OF_RUNES:
						// Use the item's own name as the event name
						onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, itemManager.getItemComposition(event.getItemId()).getName()));
						break;
					case ItemID.SUPPLY_CRATE_24884:
						onInvChange(collectInvItems(LootRecordType.EVENT, MAHOGANY_CRATE_EVENT, client.getBoostedSkillLevel(Skill.CONSTRUCTION)));
						break;
					case ItemID.HALLOWED_SACK:
						onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HALLOWED_SACK_EVENT));
						break;
				}
			}
			else if (event.getMenuOption().equals("Loot") && IMPLING_JARS.contains(event.getItemId()))
			{
				final int itemId = event.getItemId();
				onInvChange(((invItems, groundItems, removedItems) ->
				{
					// Count how many jars were consumed; each jar looted counts as one kill
					int cnt = removedItems.count(itemId);
					if (cnt > 0)
					{
						String name = itemManager.getItemComposition(itemId).getMembersName();
						addLoot(name, -1, LootRecordType.EVENT, null, invItems, cnt);
					}
				}));
			}
		}
	}

	private static boolean isNPCOp(MenuAction menuAction)
	{
		final int id = menuAction.getId();
		return id >= MenuAction.NPC_FIRST_OPTION.getId() && id <= MenuAction.NPC_FIFTH_OPTION.getId();
	}

	private static boolean isObjectOp(MenuAction menuAction)
	{
		final int id = menuAction.getId();
		return (id >= MenuAction.GAME_OBJECT_FIRST_OPTION.getId() && id <= MenuAction.GAME_OBJECT_FOURTH_OPTION.getId()) || id == MenuAction.GAME_OBJECT_FIFTH_OPTION.getId();
	}

	@Schedule(
		period = 5,
		unit = ChronoUnit.MINUTES,
		asynchronous = true
	)
	public void submitLootTask()
	{
		submitLoot();
	}

	/**
	 * Flush queued loot: persist it to the profile config and submit it to the
	 * loot tracker service. Returns null when nothing was queued.
	 */
	@Nullable
	private CompletableFuture<Void> submitLoot()
	{
		List<LootRecord> copy;
		synchronized (queuedLoots)
		{
			if (queuedLoots.isEmpty())
			{
				return null;
			}

			copy = new
				ArrayList<>(queuedLoots);
			queuedLoots.clear();
		}

		saveLoot(copy);

		log.debug("Submitting {} loot records", copy.size());
		return lootTrackerClient.submit(copy);
	}

	// Aggregate individual records into one ConfigLoot per (type, event) pair
	private Collection<ConfigLoot> combine(List<LootRecord> records)
	{
		Map<ConfigLoot, ConfigLoot> map = new HashMap<>();
		for (LootRecord record : records)
		{
			ConfigLoot key = new ConfigLoot(record.getType(), record.getEventId());
			ConfigLoot loot = map.computeIfAbsent(key, k -> key);
			loot.kills++;
			for (GameItem item : record.getDrops())
			{
				loot.add(item.getId(), item.getQty());
			}
		}
		return map.values();
	}

	// Merge the given records into the per-profile stored loot config
	private void saveLoot(List<LootRecord> records)
	{
		Instant now = Instant.now();
		Collection<ConfigLoot> combinedRecords = combine(records);
		for (ConfigLoot record : combinedRecords)
		{
			ConfigLoot lootConfig = getLootConfig(record.type, record.name);
			if (lootConfig == null)
			{
				lootConfig = record;
			}
			else
			{
				lootConfig.kills += record.kills;
				// drops is a flat [id, qty, id, qty, ...] array
				for (int i = 0; i < record.drops.length; i += 2)
				{
					lootConfig.add(record.drops[i], record.drops[i + 1]);
				}
			}
			lootConfig.last = now;
			setLootConfig(lootConfig.type, lootConfig.name, lootConfig);
		}
	}

	// Cancel any pending inventory-diff capture
	private void resetEvent()
	{
		inventoryId = null;
		inventorySnapshot = null;
		inventorySnapshotCb = null;
	}

	// Invoked with items added to the watched container, ground items at the player's
	// tile, and the multiset of items removed from the container since the snapshot
	@FunctionalInterface
	interface InvChangeCallback
	{
		void accept(Collection<ItemStack> invItems, Collection<ItemStack> groundItems, Multiset<Integer> removedItems);
	}

	private InvChangeCallback collectInvItems(LootRecordType type, String event)
	{
		return collectInvItems(type, event, null);
	}

	// Callback recording only items added to the inventory as loot for the given event
	private InvChangeCallback collectInvItems(LootRecordType type, String event, Object metadata)
	{
		return (invItems, groundItems, removedItems) ->
			addLoot(event, -1, type, metadata, invItems);
	}

	private InvChangeCallback collectInvAndGroundItems(LootRecordType type, String event)
	{
		return collectInvAndGroundItems(type, event, null);
	}

	// Callback recording both added inventory items and nearby ground items as loot
	private InvChangeCallback collectInvAndGroundItems(LootRecordType type, String event, Object metadata)
	{
		return (invItems, groundItems, removedItems) ->
		{
			List<ItemStack>
				combined = new ArrayList<>();
			combined.addAll(invItems);
			combined.addAll(groundItems);
			addLoot(event, -1, type, metadata, combined);
		};
	}

	// Begin watching the player inventory for its next change
	private void onInvChange(InvChangeCallback cb)
	{
		onInvChange(InventoryID.INVENTORY, cb);
	}

	// Snapshot the given container and arm cb to receive the diff on its next change
	private void onInvChange(InventoryID inv, InvChangeCallback cb)
	{
		inventoryId = inv;
		inventorySnapshot = HashMultiset.create();
		inventorySnapshotCb = cb;

		final ItemContainer itemContainer = client.getItemContainer(inv);
		if (itemContainer != null)
		{
			Arrays.stream(itemContainer.getItems())
				.forEach(item -> inventorySnapshot.add(item.getId(), item.getQuantity()));
		}
	}

	/**
	 * Reconstruct herbiboar herb loot from the "Grimy X herb" spam messages sharing the
	 * given timestamp (the herbs never appear as an inventory change — presumably they
	 * went straight into a herb sack). Returns true if any herbs were found and recorded.
	 */
	private boolean processHerbiboarHerbSackLoot(int timestamp)
	{
		List<ItemStack> herbs = new ArrayList<>();

		for (MessageNode messageNode : client.getMessages())
		{
			if (messageNode.getTimestamp() != timestamp
				|| messageNode.getType() != ChatMessageType.SPAM)
			{
				continue;
			}

			Matcher matcher = HERBIBOAR_HERB_SACK_PATTERN.matcher(messageNode.getValue());
			if (matcher.matches())
			{
				herbs.add(new ItemStack(itemManager.search(matcher.group(1)).get(0).getId(), 1, client.getLocalPlayer().getLocalLocation()));
			}
		}

		if (herbs.isEmpty())
		{
			return false;
		}

		int herbloreLevel = client.getBoostedSkillLevel(Skill.HERBLORE);
		addLoot(HERBIBOAR_EVENT, -1, LootRecordType.EVENT, herbloreLevel, herbs);
		return true;
	}

	void toggleItem(String name, boolean ignore)
	{
		final Set<String> ignoredItemSet = new LinkedHashSet<>(ignoredItems);

		if (ignore)
		{
			ignoredItemSet.add(name);
		}
		else
		{
			ignoredItemSet.remove(name);
		}

		config.setIgnoredItems(Text.toCSV(ignoredItemSet));
		// the config changed will update the panel
	}

	boolean isIgnored(String name)
	{
		return ignoredItems.contains(name);
	}

	void toggleEvent(String name, boolean ignore)
	{
		final Set<String> ignoredSet = new LinkedHashSet<>(ignoredEvents);

		if (ignore)
		{
			ignoredSet.add(name);
		}
		else
		{
			ignoredSet.remove(name);
		}

		config.setIgnoredEvents(Text.toCSV(ignoredSet));
		// the config changed will update the panel
	}

	boolean isEventIgnored(String name)
	{
		return ignoredEvents.contains(name);
	}

	// Build a panel row item, resolving the item name, GE/HA prices, and ignored flag
	private LootTrackerItem buildLootTrackerItem(int itemId, int quantity)
	{
		final ItemComposition itemComposition = itemManager.getItemComposition(itemId);
		final int gePrice = itemManager.getItemPrice(itemId);
		final int haPrice = itemComposition.getHaPrice();
		final boolean ignored = ignoredItems.contains(itemComposition.getMembersName());

		return new LootTrackerItem(
			itemId,
			itemComposition.getMembersName(),
			quantity,
			gePrice,
			haPrice,
			ignored);
	}

	private LootTrackerItem[] buildEntries(final Collection<ItemStack> itemStacks)
	{
		return itemStacks.stream()
			.map(itemStack -> buildLootTrackerItem(itemStack.getId(), itemStack.getQuantity()))
			.toArray(LootTrackerItem[]::new);
	}

	private static Collection<GameItem> toGameItems(Collection<ItemStack> items)
	{
		return items.stream()
			.map(item -> new GameItem(item.getId(), item.getQuantity()))
			.collect(Collectors.toList());
	}

	private Collection<LootTrackerRecord> convertToLootTrackerRecord(final Collection<LootAggregate> records)
	{
		return records.stream()
			.sorted(Comparator.comparing(LootAggregate::getLast_time))
			.map(record ->
			{
				LootTrackerItem[] drops = record.getDrops().stream().map(itemStack ->
					buildLootTrackerItem(itemStack.getId(), itemStack.getQty())
				).toArray(LootTrackerItem[]::new);

				return new LootTrackerRecord(record.getEventId(), "", record.getType(), drops, record.getAmount());
			})
			.collect(Collectors.toCollection(ArrayList::new));
	}

	// ConfigLoot.drops is a flat [id, qty, id, qty, ...] array
	private LootTrackerRecord convertToLootTrackerRecord(final ConfigLoot configLoot)
	{
		LootTrackerItem[] items = new LootTrackerItem[configLoot.drops.length / 2];
		for (int i = 0; i < configLoot.drops.length; i += 2)
		{
			int id = configLoot.drops[i];
			int qty = configLoot.drops[i + 1];
			items[i >> 1] = buildLootTrackerItem(id, qty);
		}
		return new LootTrackerRecord(configLoot.name, "", configLoot.type, items, configLoot.kills);
	}

	/**
	 * Is player currently within the provided map regions
	 */
	private boolean isPlayerWithinMapRegion(Set<Integer> definedMapRegions)
	{
		final int[] mapRegions = client.getMapRegions();

		for (int region : mapRegions)
		{
			if (definedMapRegions.contains(region))
			{
				return true;
			}
		}

		return false;
	}

	// Post a "You've killed X for N loot." message valued at GE prices
	private void lootReceivedChatMessage(final Collection<ItemStack> items, final String name)
	{
		long totalPrice = items.stream()
			.mapToLong(is -> (long) itemManager.getItemPrice(is.getId()) * is.getQuantity())
			.sum();

		final String message = new ChatMessageBuilder()
			.append(ChatColorType.HIGHLIGHT)
			.append("You've killed ")
			.append(name)
			.append(" for ")
			.append(QuantityFormatter.quantityToStackSize(totalPrice))
			.append(" loot.")
			.build();

		chatMessageManager.queue(
			QueuedMessage.builder()
				.type(ChatMessageType.CONSOLE)
				.runeLiteFormattedMessage(message)
				.build());
	}

	// Read the stored loot for (type, name) from the current profile's config, or null
	ConfigLoot getLootConfig(LootRecordType type, String name)
	{
		String profile = profileKey;
		if (Strings.isNullOrEmpty(profile))
		{
			log.debug("Trying to get loot with no profile!");
			return null;
		}

		String json = configManager.getConfiguration(LootTrackerConfig.GROUP, profile, "drops_" + type + "_" + name);
		if (json == null)
		{
			return null;
		}

		return gson.fromJson(json, ConfigLoot.class);
	}

	void setLootConfig(LootRecordType type, String name, ConfigLoot loot)
	{
		String profile = profileKey;
		if (Strings.isNullOrEmpty(profile))
		{
			log.debug("Trying to set loot with no profile!");
			return;
		}

		String json = gson.toJson(loot);
		configManager.setConfiguration(LootTrackerConfig.GROUP, profile, "drops_" + type + "_" + name, json);
	}

	void removeLootConfig(LootRecordType type, String name)
	{
		String profile = profileKey;
		if (Strings.isNullOrEmpty(profile))
		{
			log.debug("Trying to remove loot with no profile!");
			return;
		}

		configManager.unsetConfiguration(LootTrackerConfig.GROUP, profile, "drops_" + type + "_" + name);
	}

	void removeAllLoot()
	{
		String profile = profileKey;
		if (Strings.isNullOrEmpty(profile))
		{
			log.debug("Trying to clear loot with no profile!");
			return;
		}

		for (String key : configManager.getRSProfileConfigurationKeys(LootTrackerConfig.GROUP, profile, "drops_"))
		{
configManager.unsetConfiguration(LootTrackerConfig.GROUP, profile, key); } } }
runelite-client/src/main/java/net/runelite/client/plugins/loottracker/LootTrackerPlugin.java
/* * Copyright (c) 2018, Psikoi <https://github.com/psikoi> * Copyright (c) 2018, Adam <[email protected]> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package net.runelite.client.plugins.loottracker; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.google.common.collect.HashMultiset; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.common.collect.Multiset; import com.google.common.collect.Multisets; import com.google.gson.Gson; import com.google.gson.JsonSyntaxException; import com.google.inject.Provides; import java.awt.image.BufferedImage; import java.time.Duration; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import javax.annotation.Nullable; import javax.inject.Inject; import javax.swing.SwingUtilities; import lombok.AccessLevel; import lombok.Getter; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import net.runelite.api.ChatMessageType; import net.runelite.api.Client; import net.runelite.api.GameState; import net.runelite.api.InventoryID; import net.runelite.api.ItemComposition; import net.runelite.api.ItemContainer; import net.runelite.api.ItemID; import net.runelite.api.MenuAction; import net.runelite.api.MessageNode; import net.runelite.api.NPC; import net.runelite.api.ObjectID; import net.runelite.api.Player; import net.runelite.api.Skill; import net.runelite.api.SpriteID; import net.runelite.api.WorldType; import net.runelite.api.coords.WorldPoint; import 
net.runelite.api.events.ChatMessage; import net.runelite.api.events.GameStateChanged; import net.runelite.api.events.ItemContainerChanged; import net.runelite.api.events.MenuOptionClicked; import net.runelite.api.events.WidgetLoaded; import net.runelite.api.widgets.WidgetID; import net.runelite.client.account.AccountSession; import net.runelite.client.account.SessionManager; import net.runelite.client.callback.ClientThread; import net.runelite.client.chat.ChatColorType; import net.runelite.client.chat.ChatMessageBuilder; import net.runelite.client.chat.ChatMessageManager; import net.runelite.client.chat.QueuedMessage; import net.runelite.client.config.ConfigManager; import net.runelite.client.eventbus.EventBus; import net.runelite.client.eventbus.Subscribe; import net.runelite.client.events.ClientShutdown; import net.runelite.client.events.ConfigChanged; import net.runelite.client.events.NpcLootReceived; import net.runelite.client.events.PlayerLootReceived; import net.runelite.client.events.RuneScapeProfileChanged; import net.runelite.client.events.SessionClose; import net.runelite.client.events.SessionOpen; import net.runelite.client.game.ItemManager; import net.runelite.client.game.ItemStack; import net.runelite.client.game.LootManager; import net.runelite.client.game.SpriteManager; import net.runelite.client.plugins.Plugin; import net.runelite.client.plugins.PluginDescriptor; import net.runelite.client.task.Schedule; import net.runelite.client.ui.ClientToolbar; import net.runelite.client.ui.NavigationButton; import net.runelite.client.util.ImageUtil; import net.runelite.client.util.QuantityFormatter; import net.runelite.client.util.Text; import net.runelite.http.api.loottracker.GameItem; import net.runelite.http.api.loottracker.LootAggregate; import net.runelite.http.api.loottracker.LootRecord; import net.runelite.http.api.loottracker.LootRecordType; import org.apache.commons.text.WordUtils; @PluginDescriptor( name = "Loot Tracker", description = "Tracks loot 
from monsters and minigames", tags = {"drops"} ) @Slf4j public class LootTrackerPlugin extends Plugin { private static final int MAX_DROPS = 1024; private static final Duration MAX_AGE = Duration.ofDays(365L); // Activity/Event loot handling private static final Pattern CLUE_SCROLL_PATTERN = Pattern.compile("You have completed [0-9]+ ([a-z]+) Treasure Trails?\\."); private static final int THEATRE_OF_BLOOD_REGION = 12867; private static final int THEATRE_OF_BLOOD_LOBBY = 14642; // Herbiboar loot handling @VisibleForTesting static final String HERBIBOAR_LOOTED_MESSAGE = "You harvest herbs from the herbiboar, whereupon it escapes."; private static final String HERBIBOAR_EVENT = "Herbiboar"; private static final Pattern HERBIBOAR_HERB_SACK_PATTERN = Pattern.compile(".+(Grimy .+?) herb.+"); // Seed Pack loot handling private static final String SEEDPACK_EVENT = "Seed pack"; // Hespori loot handling private static final String HESPORI_LOOTED_MESSAGE = "You have successfully cleared this patch for new crops."; private static final String HESPORI_EVENT = "Hespori"; private static final int HESPORI_REGION = 5021; // Chest loot handling private static final String CHEST_LOOTED_MESSAGE = "You find some treasure in the chest!"; private static final Pattern ROGUES_CHEST_PATTERN = Pattern.compile("You find (a|some)([a-z\\s]*) inside."); private static final Pattern LARRAN_LOOTED_PATTERN = Pattern.compile("You have opened Larran's (big|small) chest .*"); // Used by Stone Chest, Isle of Souls chest, Dark Chest private static final String OTHER_CHEST_LOOTED_MESSAGE = "You steal some loot from the chest."; private static final String DORGESH_KAAN_CHEST_LOOTED_MESSAGE = "You find treasure inside!"; private static final String GRUBBY_CHEST_LOOTED_MESSAGE = "You have opened the Grubby Chest"; private static final Pattern HAM_CHEST_LOOTED_PATTERN = Pattern.compile("Your (?<key>[a-z]+) key breaks in the lock.*"); private static final int HAM_STOREROOM_REGION = 10321; private static 
final Map<Integer, String> CHEST_EVENT_TYPES = new ImmutableMap.Builder<Integer, String>(). put(5179, "Brimstone Chest"). put(11573, "Crystal Chest"). put(12093, "Larran's big chest"). put(12127, "The Gauntlet"). put(13113, "Larran's small chest"). put(13151, "Elven Crystal Chest"). put(5277, "Stone chest"). put(10835, "Dorgesh-Kaan Chest"). put(10834, "Dorgesh-Kaan Chest"). put(7323, "Grubby Chest"). put(8593, "Isle of Souls Chest"). put(7827, "Dark Chest"). put(13117, "Rogues' Chest"). build(); // Shade chest loot handling private static final Pattern SHADE_CHEST_NO_KEY_PATTERN = Pattern.compile("You need a [a-z]+ key with a [a-z]+ trim to open this chest .*"); private static final Map<Integer, String> SHADE_CHEST_OBJECTS = new ImmutableMap.Builder<Integer, String>(). put(ObjectID.BRONZE_CHEST, "Bronze key red"). put(ObjectID.BRONZE_CHEST_4112, "Bronze key brown"). put(ObjectID.BRONZE_CHEST_4113, "Bronze key crimson"). put(ObjectID.BRONZE_CHEST_4114, "Bronze key black"). put(ObjectID.BRONZE_CHEST_4115, "Bronze key purple"). put(ObjectID.STEEL_CHEST, "Steel key red"). put(ObjectID.STEEL_CHEST_4117, "Steel key brown"). put(ObjectID.STEEL_CHEST_4118, "Steel key crimson"). put(ObjectID.STEEL_CHEST_4119, "Steel key black"). put(ObjectID.STEEL_CHEST_4120, "Steel key purple"). put(ObjectID.BLACK_CHEST, "Black key red"). put(ObjectID.BLACK_CHEST_4122, "Black key brown"). put(ObjectID.BLACK_CHEST_4123, "Black key crimson"). put(ObjectID.BLACK_CHEST_4124, "Black key black"). put(ObjectID.BLACK_CHEST_4125, "Black key purple"). put(ObjectID.SILVER_CHEST, "Silver key red"). put(ObjectID.SILVER_CHEST_4127, "Silver key brown"). put(ObjectID.SILVER_CHEST_4128, "Silver key crimson"). put(ObjectID.SILVER_CHEST_4129, "Silver key black"). put(ObjectID.SILVER_CHEST_4130, "Silver key purple"). put(ObjectID.GOLD_CHEST, "Gold key red"). put(ObjectID.GOLD_CHEST_41213, "Gold key brown"). put(ObjectID.GOLD_CHEST_41214, "Gold key crimson"). put(ObjectID.GOLD_CHEST_41215, "Gold key black"). 
put(ObjectID.GOLD_CHEST_41216, "Gold key purple"). build(); // Hallow Sepulchre Coffin handling private static final String COFFIN_LOOTED_MESSAGE = "You push the coffin lid aside."; private static final String HALLOWED_SEPULCHRE_COFFIN_EVENT = "Coffin (Hallowed Sepulchre)"; private static final Set<Integer> HALLOWED_SEPULCHRE_MAP_REGIONS = ImmutableSet.of(8797, 10077, 9308, 10074, 9050); // one map region per floor private static final String HALLOWED_SACK_EVENT = "Hallowed Sack"; // Last man standing map regions private static final Set<Integer> LAST_MAN_STANDING_REGIONS = ImmutableSet.of(13658, 13659, 13660, 13914, 13915, 13916, 13918, 13919, 13920, 14174, 14175, 14176, 14430, 14431, 14432); private static final Pattern PICKPOCKET_REGEX = Pattern.compile("You pick (the )?(?<target>.+)'s? pocket.*"); private static final String BIRDNEST_EVENT = "Bird nest"; private static final Set<Integer> BIRDNEST_IDS = ImmutableSet.of(ItemID.BIRD_NEST, ItemID.BIRD_NEST_5071, ItemID.BIRD_NEST_5072, ItemID.BIRD_NEST_5073, ItemID.BIRD_NEST_5074, ItemID.BIRD_NEST_7413, ItemID.BIRD_NEST_13653, ItemID.BIRD_NEST_22798, ItemID.BIRD_NEST_22800); // Birdhouses private static final Pattern BIRDHOUSE_PATTERN = Pattern.compile("You dismantle and discard the trap, retrieving (?:(?:a|\\d{1,2}) nests?, )?10 dead birds, \\d{1,3} feathers and (\\d,?\\d{1,3}) Hunter XP\\."); private static final Map<Integer, String> BIRDHOUSE_XP_TO_TYPE = new ImmutableMap.Builder<Integer, String>(). put(280, "Regular Bird House"). put(420, "Oak Bird House"). put(560, "Willow Bird House"). put(700, "Teak Bird House"). put(820, "Maple Bird House"). put(960, "Mahogany Bird House"). put(1020, "Yew Bird House"). put(1140, "Magic Bird House"). put(1200, "Redwood Bird House"). build(); /* * This map is used when a pickpocket target has a different name in the chat message than their in-game name. 
* Note that if the two NPCs can be found in the same place, there is a chance of race conditions * occurring when changing targets mid-pickpocket, in which case a different solution may need to be considered. */ private static final Multimap<String, String> PICKPOCKET_DISAMBIGUATION_MAP = ImmutableMultimap.of( "H.A.M. Member", "Man", "H.A.M. Member", "Woman" ); private static final String CASKET_EVENT = "Casket"; private static final String WINTERTODT_SUPPLY_CRATE_EVENT = "Supply crate (Wintertodt)"; // Soul Wars private static final String SPOILS_OF_WAR_EVENT = "Spoils of war"; private static final Set<Integer> SOUL_WARS_REGIONS = ImmutableSet.of(8493, 8749, 9005); // Tempoross private static final String TEMPOROSS_EVENT = "Reward pool (Tempoross)"; private static final String TEMPOROSS_CASKET_EVENT = "Casket (Tempoross)"; private static final String TEMPOROSS_LOOT_STRING = "You found some loot: "; private static final int TEMPOROSS_REGION = 12588; // Guardians of the Rift private static final String GUARDIANS_OF_THE_RIFT_EVENT = "Guardians of the Rift"; private static final String INTRICATE_POUCH_EVENT = "Intricate pouch"; private static final String GUARDIANS_OF_THE_RIFT_LOOT_STRING = "You found some loot: "; private static final int GUARDIANS_OF_THE_RIFT_REGION = 14484; // Mahogany Homes private static final String MAHOGANY_CRATE_EVENT = "Supply crate (Mahogany Homes)"; // Implings private static final Set<Integer> IMPLING_JARS = ImmutableSet.of( ItemID.BABY_IMPLING_JAR, ItemID.YOUNG_IMPLING_JAR, ItemID.GOURMET_IMPLING_JAR, ItemID.EARTH_IMPLING_JAR, ItemID.ESSENCE_IMPLING_JAR, ItemID.ECLECTIC_IMPLING_JAR, ItemID.NATURE_IMPLING_JAR, ItemID.MAGPIE_IMPLING_JAR, ItemID.NINJA_IMPLING_JAR, ItemID.CRYSTAL_IMPLING_JAR, ItemID.DRAGON_IMPLING_JAR, ItemID.LUCKY_IMPLING_JAR ); private static final String IMPLING_CATCH_MESSAGE = "You manage to catch the impling and acquire some loot."; // Raids private static final String CHAMBERS_OF_XERIC = "Chambers of Xeric"; private 
static final String THEATRE_OF_BLOOD = "Theatre of Blood"; private static final String TOMBS_OF_AMASCUT = "Tombs of Amascut"; private static final Set<Character> VOWELS = ImmutableSet.of('a', 'e', 'i', 'o', 'u'); @Inject private ClientToolbar clientToolbar; @Inject private ItemManager itemManager; @Inject private SpriteManager spriteManager; @Inject private LootTrackerConfig config; @Inject private Client client; @Inject private ClientThread clientThread; @Inject private SessionManager sessionManager; @Inject private ScheduledExecutorService executor; @Inject private EventBus eventBus; @Inject private ChatMessageManager chatMessageManager; @Inject private LootManager lootManager; @Inject private ConfigManager configManager; @Inject private Gson gson; @Getter(AccessLevel.PACKAGE) @Inject private LootTrackerClient lootTrackerClient; private LootTrackerPanel panel; private NavigationButton navButton; private boolean chestLooted; private String lastPickpocketTarget; private List<String> ignoredItems = new ArrayList<>(); private List<String> ignoredEvents = new ArrayList<>(); private InventoryID inventoryId; private Multiset<Integer> inventorySnapshot; private InvChangeCallback inventorySnapshotCb; private final List<LootRecord> queuedLoots = new ArrayList<>(); private String profileKey; private static Collection<ItemStack> stack(Collection<ItemStack> items) { final List<ItemStack> list = new ArrayList<>(); for (final ItemStack item : items) { int quantity = 0; for (final ItemStack i : list) { if (i.getId() == item.getId()) { quantity = i.getQuantity(); list.remove(i); break; } } if (quantity > 0) { list.add(new ItemStack(item.getId(), item.getQuantity() + quantity, item.getLocation())); } else { list.add(item); } } return list; } @Provides LootTrackerConfig provideConfig(ConfigManager configManager) { return configManager.getConfig(LootTrackerConfig.class); } @Subscribe public void onSessionOpen(SessionOpen sessionOpen) { AccountSession accountSession = 
sessionManager.getAccountSession(); if (accountSession.getUuid() != null) { lootTrackerClient.setUuid(accountSession.getUuid()); } else { lootTrackerClient.setUuid(null); } } @Subscribe public void onSessionClose(SessionClose sessionClose) { submitLoot(); lootTrackerClient.setUuid(null); } @Subscribe public void onRuneScapeProfileChanged(RuneScapeProfileChanged e) { final String profileKey = configManager.getRSProfileKey(); if (profileKey == null) { return; } if (profileKey.equals(this.profileKey)) { return; } switchProfile(profileKey); } private void switchProfile(String profileKey) { executor.execute(() -> { // Current queued loot is for the previous profile, so save it first with the current profile key submitLoot(); this.profileKey = profileKey; log.debug("Switched to profile {}", profileKey); if (!config.syncPanel()) { return; } int drops = 0; List<ConfigLoot> loots = new ArrayList<>(); Instant old = Instant.now().minus(MAX_AGE); for (String key : configManager.getRSProfileConfigurationKeys(LootTrackerConfig.GROUP, profileKey, "drops_")) { String json = configManager.getConfiguration(LootTrackerConfig.GROUP, profileKey, key); ConfigLoot configLoot; try { configLoot = gson.fromJson(json, ConfigLoot.class); } catch (JsonSyntaxException ex) { log.warn("Removing loot with malformed json: {}", json, ex); configManager.unsetConfiguration(LootTrackerConfig.GROUP, profileKey, key); continue; } if (configLoot.last.isBefore(old)) { log.debug("Removing old loot for {} {}", configLoot.type, configLoot.name); configManager.unsetConfiguration(LootTrackerConfig.GROUP, profileKey, key); continue; } if (drops >= MAX_DROPS && !loots.isEmpty() && loots.get(0).last.isAfter(configLoot.last)) { // fast drop continue; } sortedInsert(loots, configLoot, Comparator.comparing(ConfigLoot::getLast)); drops += configLoot.numDrops(); if (drops >= MAX_DROPS) { ConfigLoot top = loots.remove(0); drops -= top.numDrops(); } } log.debug("Loaded {} records", loots.size()); 
clientThread.invokeLater(() -> { // convertToLootTrackerRecord requires item compositions to be available to get the item name, // so it can't be run while the client is starting if (client.getGameState().getState() < GameState.LOGIN_SCREEN.getState()) { return false; } // convertToLootTrackerRecord must be called on client thread List<LootTrackerRecord> records = loots.stream() .map(this::convertToLootTrackerRecord) .collect(Collectors.toList()); SwingUtilities.invokeLater(() -> { panel.clearRecords(); panel.addRecords(records); }); return true; }); }); } private static <T> void sortedInsert(List<T> list, T value, Comparator<? super T> c) { int idx = Collections.binarySearch(list, value, c); list.add(idx < 0 ? -idx - 1 : idx, value); } @Subscribe public void onConfigChanged(ConfigChanged event) { if (event.getGroup().equals(LootTrackerConfig.GROUP)) { if ("ignoredItems".equals(event.getKey()) || "ignoredEvents".equals(event.getKey())) { ignoredItems = Text.fromCSV(config.getIgnoredItems()); ignoredEvents = Text.fromCSV(config.getIgnoredEvents()); SwingUtilities.invokeLater(panel::updateIgnoredRecords); } } } @Override protected void startUp() throws Exception { profileKey = null; ignoredItems = Text.fromCSV(config.getIgnoredItems()); ignoredEvents = Text.fromCSV(config.getIgnoredEvents()); panel = new LootTrackerPanel(this, itemManager, config); spriteManager.getSpriteAsync(SpriteID.TAB_INVENTORY, 0, panel::loadHeaderIcon); final BufferedImage icon = ImageUtil.loadImageResource(getClass(), "panel_icon.png"); navButton = NavigationButton.builder() .tooltip("Loot Tracker") .icon(icon) .priority(5) .panel(panel) .build(); clientToolbar.addNavigation(navButton); AccountSession accountSession = sessionManager.getAccountSession(); if (accountSession != null) { lootTrackerClient.setUuid(accountSession.getUuid()); } String profileKey = configManager.getRSProfileKey(); if (profileKey != null) { switchProfile(profileKey); } } @Override protected void shutDown() { 
submitLoot(); clientToolbar.removeNavigation(navButton); lootTrackerClient.setUuid(null); chestLooted = false; } @Subscribe public void onClientShutdown(ClientShutdown event) { Future<Void> future = submitLoot(); if (future != null) { event.waitFor(future); } } @Subscribe public void onGameStateChanged(final GameStateChanged event) { if (event.getGameState() == GameState.LOADING && !client.isInInstancedRegion()) { chestLooted = false; } } void addLoot(@NonNull String name, int combatLevel, LootRecordType type, Object metadata, Collection<ItemStack> items) { addLoot(name, combatLevel, type, metadata, items, 1); } void addLoot(@NonNull String name, int combatLevel, LootRecordType type, Object metadata, Collection<ItemStack> items, int amount) { final LootTrackerItem[] entries = buildEntries(stack(items)); SwingUtilities.invokeLater(() -> panel.add(name, type, combatLevel, entries, amount)); LootRecord lootRecord = new LootRecord(name, type, metadata, toGameItems(items), Instant.now(), getLootWorldId()); synchronized (queuedLoots) { queuedLoots.add(lootRecord); } eventBus.post(new LootReceived(name, combatLevel, type, items, amount)); } private Integer getLootWorldId() { // For the wiki to determine drop rates based on dmm brackets / identify leagues drops return client.getWorldType().contains(WorldType.SEASONAL) ? client.getWorld() : null; } @Subscribe public void onNpcLootReceived(final NpcLootReceived npcLootReceived) { final NPC npc = npcLootReceived.getNpc(); final Collection<ItemStack> items = npcLootReceived.getItems(); final String name = npc.getName(); final int combat = npc.getCombatLevel(); addLoot(name, combat, LootRecordType.NPC, npc.getId(), items); if (config.npcKillChatMessage()) { final String prefix = VOWELS.contains(Character.toLowerCase(name.charAt(0))) ? 
"an" : "a"; lootReceivedChatMessage(items, prefix + ' ' + name); } } @Subscribe public void onPlayerLootReceived(final PlayerLootReceived playerLootReceived) { // Ignore Last Man Standing and Soul Wars player loots if (isPlayerWithinMapRegion(LAST_MAN_STANDING_REGIONS) || isPlayerWithinMapRegion(SOUL_WARS_REGIONS)) { return; } final Player player = playerLootReceived.getPlayer(); final Collection<ItemStack> items = playerLootReceived.getItems(); final String name = player.getName(); final int combat = player.getCombatLevel(); addLoot(name, combat, LootRecordType.PLAYER, null, items); if (config.pvpKillChatMessage()) { lootReceivedChatMessage(items, name); } } @Subscribe public void onWidgetLoaded(WidgetLoaded widgetLoaded) { String event; Object metadata = null; final ItemContainer container; switch (widgetLoaded.getGroupId()) { case (WidgetID.BARROWS_REWARD_GROUP_ID): event = "Barrows"; container = client.getItemContainer(InventoryID.BARROWS_REWARD); break; case (WidgetID.CHAMBERS_OF_XERIC_REWARD_GROUP_ID): if (chestLooted) { return; } event = CHAMBERS_OF_XERIC; container = client.getItemContainer(InventoryID.CHAMBERS_OF_XERIC_CHEST); chestLooted = true; break; case (WidgetID.THEATRE_OF_BLOOD_GROUP_ID): if (chestLooted) { return; } int region = WorldPoint.fromLocalInstance(client, client.getLocalPlayer().getLocalLocation()).getRegionID(); if (region != THEATRE_OF_BLOOD_REGION && region != THEATRE_OF_BLOOD_LOBBY) { return; } event = THEATRE_OF_BLOOD; container = client.getItemContainer(InventoryID.THEATRE_OF_BLOOD_CHEST); chestLooted = true; break; case WidgetID.TOA_REWARD_GROUP_ID: if (chestLooted) { return; } event = TOMBS_OF_AMASCUT; container = client.getItemContainer(InventoryID.TOA_REWARD_CHEST); chestLooted = true; break; case (WidgetID.KINGDOM_GROUP_ID): event = "Kingdom of Miscellania"; container = client.getItemContainer(InventoryID.KINGDOM_OF_MISCELLANIA); break; case (WidgetID.FISHING_TRAWLER_REWARD_GROUP_ID): event = "Fishing Trawler"; metadata = 
client.getBoostedSkillLevel(Skill.FISHING); container = client.getItemContainer(InventoryID.FISHING_TRAWLER_REWARD); break; case (WidgetID.DRIFT_NET_FISHING_REWARD_GROUP_ID): event = "Drift Net"; metadata = client.getBoostedSkillLevel(Skill.FISHING); container = client.getItemContainer(InventoryID.DRIFT_NET_FISHING_REWARD); break; case WidgetID.WILDERNESS_LOOT_CHEST: if (chestLooted) { return; } event = "Loot Chest"; container = client.getItemContainer(InventoryID.WILDERNESS_LOOT_CHEST); chestLooted = true; break; default: return; } if (container == null) { return; } // Convert container items to array of ItemStack final Collection<ItemStack> items = Arrays.stream(container.getItems()) .filter(item -> item.getId() > 0) .map(item -> new ItemStack(item.getId(), item.getQuantity(), client.getLocalPlayer().getLocalLocation())) .collect(Collectors.toList()); if (config.showRaidsLootValue() && (event.equals(THEATRE_OF_BLOOD) || event.equals(CHAMBERS_OF_XERIC)) || event.equals(TOMBS_OF_AMASCUT)) { long totalValue = items.stream() .filter(item -> item.getId() > -1) .mapToLong(item -> config.priceType() == LootTrackerPriceType.GRAND_EXCHANGE ? 
// NOTE(review): this chunk begins mid-method — the start of the enclosing method
// (which computes `totalValue` and receives `event`/`container`/`metadata`/`items`)
// lies outside this view. The fragment below is preserved verbatim.
			(long) itemManager.getItemPrice(item.getId()) * item.getQuantity() :
			(long) itemManager.getItemComposition(item.getId()).getHaPrice() * item.getQuantity())
			.sum();

		// Announce the approximate value of the received loot in the chat box.
		String chatMessage = new ChatMessageBuilder()
			.append(ChatColorType.NORMAL)
			.append("Your loot is worth around ")
			.append(ChatColorType.HIGHLIGHT)
			.append(QuantityFormatter.formatNumber(totalValue))
			.append(ChatColorType.NORMAL)
			.append(" coins.")
			.build();

		chatMessageManager.queue(QueuedMessage.builder()
			.type(ChatMessageType.FRIENDSCHATNOTIFICATION)
			.runeLiteFormattedMessage(chatMessage)
			.build());
	}

	if (items.isEmpty())
	{
		log.debug("No items to find for Event: {} | Container: {}", event, container);
		return;
	}

	addLoot(event, -1, LootRecordType.EVENT, metadata, items);
}

/**
 * Dispatches loot tracking for events that are only observable via chat messages:
 * chest opens, Hallowed Sepulchre coffins, herbiboar, Hespori, H.A.M. chests,
 * pickpockets, clue scroll rewards, bird houses, Tempoross, Guardians of the Rift
 * and impling catches. Each branch arms an inventory-change snapshot via
 * {@code onInvChange(...)} so the next inventory diff is recorded as loot.
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
	if (event.getType() != ChatMessageType.GAMEMESSAGE && event.getType() != ChatMessageType.SPAM)
	{
		return;
	}

	final String message = event.getMessage();

	if (message.equals(CHEST_LOOTED_MESSAGE) || message.equals(OTHER_CHEST_LOOTED_MESSAGE)
		|| message.equals(DORGESH_KAAN_CHEST_LOOTED_MESSAGE) || message.startsWith(GRUBBY_CHEST_LOOTED_MESSAGE)
		|| LARRAN_LOOTED_PATTERN.matcher(message).matches()
		|| ROGUES_CHEST_PATTERN.matcher(message).matches())
	{
		// Chest loot events are keyed by the map region the player is standing in.
		final int regionID = client.getLocalPlayer().getWorldLocation().getRegionID();
		if (!CHEST_EVENT_TYPES.containsKey(regionID))
		{
			return;
		}

		onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, CHEST_EVENT_TYPES.get(regionID)));
		return;
	}

	if (message.equals(COFFIN_LOOTED_MESSAGE) && isPlayerWithinMapRegion(HALLOWED_SEPULCHRE_MAP_REGIONS))
	{
		onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HALLOWED_SEPULCHRE_COFFIN_EVENT));
		return;
	}

	if (message.equals(HERBIBOAR_LOOTED_MESSAGE))
	{
		// Herb sack users get their herbs reported via SPAM messages instead of the inventory.
		if (processHerbiboarHerbSackLoot(event.getTimestamp()))
		{
			return;
		}

		onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HERBIBOAR_EVENT, client.getBoostedSkillLevel(Skill.HERBLORE)));
		return;
	}

	final int regionID = client.getLocalPlayer().getWorldLocation().getRegionID();
	if (HESPORI_REGION == regionID && message.equals(HESPORI_LOOTED_MESSAGE))
	{
		onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HESPORI_EVENT));
		return;
	}

	final Matcher hamStoreroomMatcher = HAM_CHEST_LOOTED_PATTERN.matcher(message);
	if (hamStoreroomMatcher.matches() && regionID == HAM_STOREROOM_REGION)
	{
		String keyType = hamStoreroomMatcher.group("key");
		onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, String.format("H.A.M. chest (%s)", keyType)));
		return;
	}

	final Matcher pickpocketMatcher = PICKPOCKET_REGEX.matcher(message);
	if (pickpocketMatcher.matches())
	{
		// Get the target's name as listed in the chat box
		String pickpocketTarget = WordUtils.capitalize(pickpocketMatcher.group("target"));

		// Occasional edge case where the pickpocket message doesn't list the correct name of the NPC (e.g. H.A.M. Members)
		if (PICKPOCKET_DISAMBIGUATION_MAP.get(lastPickpocketTarget).contains(pickpocketTarget))
		{
			pickpocketTarget = lastPickpocketTarget;
		}

		onInvChange(collectInvAndGroundItems(LootRecordType.PICKPOCKET, pickpocketTarget));
		return;
	}

	// Check if message is for a clue scroll reward
	final Matcher m = CLUE_SCROLL_PATTERN.matcher(Text.removeTags(message));
	if (m.find())
	{
		final String type = m.group(1).toLowerCase();
		String eventType;
		switch (type)
		{
			case "beginner":
				eventType = "Clue Scroll (Beginner)";
				break;
			case "easy":
				eventType = "Clue Scroll (Easy)";
				break;
			case "medium":
				eventType = "Clue Scroll (Medium)";
				break;
			case "hard":
				eventType = "Clue Scroll (Hard)";
				break;
			case "elite":
				eventType = "Clue Scroll (Elite)";
				break;
			case "master":
				eventType = "Clue Scroll (Master)";
				break;
			default:
				log.debug("Unrecognized clue type: {}", type);
				return;
		}

		// Clue Scrolls use same InventoryID as Barrows
		onInvChange(InventoryID.BARROWS_REWARD, collectInvItems(LootRecordType.EVENT, eventType));
		return;
	}

	if (SHADE_CHEST_NO_KEY_PATTERN.matcher(message).matches())
	{
		// Player didn't have the key they needed.
		resetEvent();
		return;
	}

	// Check if message is a birdhouse type
	final Matcher matcher = BIRDHOUSE_PATTERN.matcher(message);
	if (matcher.matches())
	{
		// The birdhouse tier is identified by the amount of Hunter XP awarded.
		final int xp = Integer.parseInt(matcher.group(1));
		final String type = BIRDHOUSE_XP_TO_TYPE.get(xp);
		if (type == null)
		{
			log.debug("Unknown bird house type {}", xp);
			return;
		}

		onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, type, client.getBoostedSkillLevel(Skill.HUNTER)));
		return;
	}

	if (regionID == TEMPOROSS_REGION && message.startsWith(TEMPOROSS_LOOT_STRING))
	{
		onInvChange(collectInvItems(LootRecordType.EVENT, TEMPOROSS_EVENT, client.getBoostedSkillLevel(Skill.FISHING)));
		return;
	}

	if (regionID == GUARDIANS_OF_THE_RIFT_REGION && message.startsWith(GUARDIANS_OF_THE_RIFT_LOOT_STRING))
	{
		onInvChange(collectInvItems(LootRecordType.EVENT, GUARDIANS_OF_THE_RIFT_EVENT, client.getBoostedSkillLevel(Skill.RUNECRAFT)));
		return;
	}

	if (message.equals(IMPLING_CATCH_MESSAGE))
	{
		onInvChange(collectInvItems(LootRecordType.EVENT, client.getLocalPlayer().getInteracting().getName()));
		return;
	}
}

/**
 * Completes any pending inventory-snapshot loot collection: when the watched
 * container changes, diffs the new contents against the snapshot taken by
 * {@code onInvChange} and passes added/removed items to the armed callback.
 */
@Subscribe
public void onItemContainerChanged(ItemContainerChanged event)
{
	// when the wilderness chest empties, clear chest loot flag for the next key
	if (event.getContainerId() == InventoryID.WILDERNESS_LOOT_CHEST.getId()
		&& Arrays.stream(event.getItemContainer().getItems()).noneMatch(i -> i.getId() > -1))
	{
		log.debug("Resetting chest loot flag");
		chestLooted = false;
	}

	if (inventoryId == null || event.getContainerId() != inventoryId.getId())
	{
		return;
	}

	final ItemContainer inventoryContainer = event.getItemContainer();
	Multiset<Integer> currentInventory = HashMultiset.create();
	Arrays.stream(inventoryContainer.getItems())
		.forEach(item -> currentInventory.add(item.getId(), item.getQuantity()));

	WorldPoint playerLocation = client.getLocalPlayer().getWorldLocation();
	final Collection<ItemStack> groundItems = lootManager.getItemSpawns(playerLocation);

	// diff  = items gained since the snapshot; diffr = items consumed/removed.
	final Multiset<Integer> diff = Multisets.difference(currentInventory, inventorySnapshot);
	final Multiset<Integer> diffr = Multisets.difference(inventorySnapshot, currentInventory);

	final List<ItemStack> items = diff.entrySet().stream()
		.map(e -> new ItemStack(e.getElement(), e.getCount(), client.getLocalPlayer().getLocalLocation()))
		.collect(Collectors.toList());

	log.debug("Inv change: {} Ground items: {}", items, groundItems);

	if (inventorySnapshotCb != null)
	{
		inventorySnapshotCb.accept(items, groundItems, diffr);
	}

	// One-shot: disarm the snapshot after a single matching container change.
	inventoryId = null;
	inventorySnapshot = null;
	inventorySnapshotCb = null;
}

/**
 * Arms loot collection for menu interactions that will produce loot on the next
 * inventory change: pickpockets (also remembers the menu target as a fallback
 * name), shade chests, and "open"/"search"/"loot" item interactions such as
 * caskets, supply crates, lockboxes, nests and impling jars.
 */
@Subscribe
public void onMenuOptionClicked(MenuOptionClicked event)
{
	// There are some pickpocket targets who show up in the chat box with a different name (e.g. H.A.M. members -> man/woman)
	// We use the value selected from the right-click menu as a fallback for the event lookup in those cases.
	if (isNPCOp(event.getMenuAction()) && event.getMenuOption().equals("Pickpocket"))
	{
		lastPickpocketTarget = Text.removeTags(event.getMenuTarget());
	}
	else if (isObjectOp(event.getMenuAction()) && event.getMenuOption().equals("Open") && SHADE_CHEST_OBJECTS.containsKey(event.getId()))
	{
		onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, SHADE_CHEST_OBJECTS.get(event.getId())));
	}
	else if (event.isItemOp())
	{
		if (event.getItemId() == ItemID.SEED_PACK && (event.getMenuOption().equals("Take") || event.getMenuOption().equals("Take-all")))
		{
			onInvChange(collectInvItems(LootRecordType.EVENT, SEEDPACK_EVENT));
		}
		else if (event.getMenuOption().equals("Search") && BIRDNEST_IDS.contains(event.getItemId()))
		{
			onInvChange(collectInvItems(LootRecordType.EVENT, BIRDNEST_EVENT));
		}
		else if (event.getMenuOption().equals("Open"))
		{
			switch (event.getItemId())
			{
				case ItemID.CASKET:
					onInvChange(collectInvItems(LootRecordType.EVENT, CASKET_EVENT));
					break;
				case ItemID.SUPPLY_CRATE:
				case ItemID.EXTRA_SUPPLY_CRATE:
					onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, WINTERTODT_SUPPLY_CRATE_EVENT));
					break;
				case ItemID.SPOILS_OF_WAR:
					onInvChange(collectInvItems(LootRecordType.EVENT, SPOILS_OF_WAR_EVENT));
					break;
				case ItemID.CASKET_25590:
					onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, TEMPOROSS_CASKET_EVENT));
					break;
				case ItemID.INTRICATE_POUCH:
					onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, INTRICATE_POUCH_EVENT));
					break;
				case ItemID.SIMPLE_LOCKBOX_25647:
				case ItemID.ELABORATE_LOCKBOX_25649:
				case ItemID.ORNATE_LOCKBOX_25651:
					onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, itemManager.getItemComposition(event.getItemId()).getName()));
					break;
				case ItemID.SUPPLY_CRATE_24884:
					onInvChange(collectInvItems(LootRecordType.EVENT, MAHOGANY_CRATE_EVENT, client.getBoostedSkillLevel(Skill.CONSTRUCTION)));
					break;
				case ItemID.HALLOWED_SACK:
					onInvChange(collectInvAndGroundItems(LootRecordType.EVENT, HALLOWED_SACK_EVENT));
					break;
			}
		}
		else if (event.getMenuOption().equals("Loot") && IMPLING_JARS.contains(event.getItemId()))
		{
			// Looting a jar removes the jar from the inventory; the removed-item count
			// tells us how many jars were actually opened.
			final int itemId = event.getItemId();
			onInvChange(((invItems, groundItems, removedItems) ->
			{
				int cnt = removedItems.count(itemId);
				if (cnt > 0)
				{
					String name = itemManager.getItemComposition(itemId).getMembersName();
					addLoot(name, -1, LootRecordType.EVENT, null, invItems, cnt);
				}
			}));
		}
	}
}

// True when the menu action is one of the five NPC interaction options.
private static boolean isNPCOp(MenuAction menuAction)
{
	final int id = menuAction.getId();
	return id >= MenuAction.NPC_FIRST_OPTION.getId() && id <= MenuAction.NPC_FIFTH_OPTION.getId();
}

// True when the menu action is one of the five game-object interaction options.
private static boolean isObjectOp(MenuAction menuAction)
{
	final int id = menuAction.getId();
	return (id >= MenuAction.GAME_OBJECT_FIRST_OPTION.getId() && id <= MenuAction.GAME_OBJECT_FOURTH_OPTION.getId())
		|| id == MenuAction.GAME_OBJECT_FIFTH_OPTION.getId();
}

// Periodically flushes queued loot records to local config and the remote client.
@Schedule(
	period = 5,
	unit = ChronoUnit.MINUTES,
	asynchronous = true
)
public void submitLootTask()
{
	submitLoot();
}

/**
 * Drains the queued loot records, persists them locally, and submits them to the
 * loot tracker client.
 *
 * @return the submission future, or {@code null} when there was nothing queued
 */
@Nullable
private CompletableFuture<Void> submitLoot()
{
	List<LootRecord> copy;
	// Copy-and-clear under the lock so producers can keep queueing while we submit.
	synchronized (queuedLoots)
	{
		if (queuedLoots.isEmpty())
		{
			return null;
		}

		copy = new ArrayList<>(queuedLoots);
		queuedLoots.clear();
	}

	saveLoot(copy);

	log.debug("Submitting {} loot records", copy.size());
	return lootTrackerClient.submit(copy);
}

/**
 * Aggregates individual loot records into one {@link ConfigLoot} per
 * (type, eventId) pair, summing kill counts and drop quantities.
 */
private Collection<ConfigLoot> combine(List<LootRecord> records)
{
	Map<ConfigLoot, ConfigLoot> map = new HashMap<>();
	for (LootRecord record : records)
	{
		ConfigLoot key = new ConfigLoot(record.getType(), record.getEventId());
		ConfigLoot loot = map.computeIfAbsent(key, k -> key);
		loot.kills++;
		for (GameItem item : record.getDrops())
		{
			loot.add(item.getId(), item.getQty());
		}
	}
	return map.values();
}

/**
 * Merges the given records into the per-profile config store, accumulating kills
 * and drops onto any previously saved totals and stamping the last-seen time.
 */
private void saveLoot(List<LootRecord> records)
{
	Instant now = Instant.now();
	Collection<ConfigLoot> combinedRecords = combine(records);
	for (ConfigLoot record : combinedRecords)
	{
		ConfigLoot lootConfig = getLootConfig(record.type, record.name);
		if (lootConfig == null)
		{
			lootConfig = record;
		}
		else
		{
			lootConfig.kills += record.kills;
			// drops is a flat [id, qty, id, qty, ...] array.
			for (int i = 0; i < record.drops.length; i += 2)
			{
				lootConfig.add(record.drops[i], record.drops[i + 1]);
			}
		}
		lootConfig.last = now;
		setLootConfig(lootConfig.type, lootConfig.name, lootConfig);
	}
}

// Disarms any pending inventory-snapshot loot collection.
private void resetEvent()
{
	inventoryId = null;
	inventorySnapshot = null;
	inventorySnapshotCb = null;
}

/**
 * Callback invoked when the watched inventory changes.
 * Receives items added to the inventory, items spawned on the ground at the
 * player's tile, and the multiset of item ids removed from the inventory.
 */
@FunctionalInterface
interface InvChangeCallback
{
	void accept(Collection<ItemStack> invItems, Collection<ItemStack> groundItems, Multiset<Integer> removedItems);
}

// Records only inventory gains as loot, with no metadata.
private InvChangeCallback collectInvItems(LootRecordType type, String event)
{
	return collectInvItems(type, event, null);
}

// Records only inventory gains as loot, tagged with the given metadata.
private InvChangeCallback collectInvItems(LootRecordType type, String event, Object metadata)
{
	return (invItems, groundItems, removedItems) -> addLoot(event, -1, type, metadata, invItems);
}

// Records inventory gains plus ground spawns as loot, with no metadata.
private InvChangeCallback collectInvAndGroundItems(LootRecordType type, String event)
{
	return collectInvAndGroundItems(type, event, null);
}

// Records inventory gains plus ground spawns as loot, tagged with the given metadata.
private InvChangeCallback collectInvAndGroundItems(LootRecordType type, String event, Object metadata)
{
	return (invItems, groundItems, removedItems) ->
	{
		List<ItemStack> combined = new ArrayList<>();
		combined.addAll(invItems);
		combined.addAll(groundItems);
		addLoot(event, -1, type, metadata, combined);
	};
}

// Arms loot collection against the player inventory.
private void onInvChange(InvChangeCallback cb)
{
	onInvChange(InventoryID.INVENTORY, cb);
}

/**
 * Snapshots the given container's current contents and registers a callback to
 * run on its next change; the diff against this snapshot becomes the loot.
 */
private void onInvChange(InventoryID inv, InvChangeCallback cb)
{
	inventoryId = inv;
	inventorySnapshot = HashMultiset.create();
	inventorySnapshotCb = cb;

	final ItemContainer itemContainer = client.getItemContainer(inv);
	if (itemContainer != null)
	{
		Arrays.stream(itemContainer.getItems())
			.forEach(item -> inventorySnapshot.add(item.getId(), item.getQuantity()));
	}
}

/**
 * Handles herbiboar loot when the herbs go straight into a herb sack: the herbs
 * are announced as SPAM chat lines sharing the trigger message's timestamp.
 *
 * @return true when herb-sack herbs were found and recorded as loot
 */
private boolean processHerbiboarHerbSackLoot(int timestamp)
{
	List<ItemStack> herbs = new ArrayList<>();

	for (MessageNode messageNode : client.getMessages())
	{
		if (messageNode.getTimestamp() != timestamp
			|| messageNode.getType() != ChatMessageType.SPAM)
		{
			continue;
		}

		Matcher matcher = HERBIBOAR_HERB_SACK_PATTERN.matcher(messageNode.getValue());
		if (matcher.matches())
		{
			herbs.add(new ItemStack(itemManager.search(matcher.group(1)).get(0).getId(), 1, client.getLocalPlayer().getLocalLocation()));
		}
	}

	if (herbs.isEmpty())
	{
		return false;
	}

	int herbloreLevel = client.getBoostedSkillLevel(Skill.HERBLORE);
	addLoot(HERBIBOAR_EVENT, -1, LootRecordType.EVENT, herbloreLevel, herbs);
	return true;
}

// Adds or removes an item name from the ignored-items config CSV.
void toggleItem(String name, boolean ignore)
{
	final Set<String> ignoredItemSet = new LinkedHashSet<>(ignoredItems);

	if (ignore)
	{
		ignoredItemSet.add(name);
	}
	else
	{
		ignoredItemSet.remove(name);
	}

	config.setIgnoredItems(Text.toCSV(ignoredItemSet));
	// the config changed will update the panel
}

boolean isIgnored(String name)
{
	return ignoredItems.contains(name);
}

// Adds or removes an event name from the ignored-events config CSV.
void toggleEvent(String name, boolean ignore)
{
	final Set<String> ignoredSet = new LinkedHashSet<>(ignoredEvents);

	if (ignore)
	{
		ignoredSet.add(name);
	}
	else
	{
		ignoredSet.remove(name);
	}

	config.setIgnoredEvents(Text.toCSV(ignoredSet));
	// the config changed will update the panel
}

boolean isEventIgnored(String name)
{
	return ignoredEvents.contains(name);
}

// Builds a UI item entry with current GE and high-alch prices and ignored state.
private LootTrackerItem buildLootTrackerItem(int itemId, int quantity)
{
	final ItemComposition itemComposition = itemManager.getItemComposition(itemId);
	final int gePrice = itemManager.getItemPrice(itemId);
	final int haPrice = itemComposition.getHaPrice();
	final boolean ignored = ignoredItems.contains(itemComposition.getMembersName());

	return new LootTrackerItem(
		itemId,
		itemComposition.getMembersName(),
		quantity,
		gePrice,
		haPrice,
		ignored);
}

private LootTrackerItem[] buildEntries(final Collection<ItemStack> itemStacks)
{
	return itemStacks.stream()
		.map(itemStack -> buildLootTrackerItem(itemStack.getId(), itemStack.getQuantity()))
		.toArray(LootTrackerItem[]::new);
}

private static Collection<GameItem> toGameItems(Collection<ItemStack> items)
{
	return items.stream()
		.map(item -> new GameItem(item.getId(), item.getQuantity()))
		.collect(Collectors.toList());
}

// Converts remote loot aggregates into UI records, oldest first.
private Collection<LootTrackerRecord> convertToLootTrackerRecord(final Collection<LootAggregate> records)
{
	return records.stream()
		.sorted(Comparator.comparing(LootAggregate::getLast_time))
		.map(record ->
		{
			LootTrackerItem[] drops = record.getDrops().stream().map(itemStack ->
				buildLootTrackerItem(itemStack.getId(), itemStack.getQty())
			).toArray(LootTrackerItem[]::new);

			return new LootTrackerRecord(record.getEventId(), "", record.getType(), drops, record.getAmount());
		})
		.collect(Collectors.toCollection(ArrayList::new));
}

// Converts a locally stored ConfigLoot (flat [id, qty, ...] drops) into a UI record.
private LootTrackerRecord convertToLootTrackerRecord(final ConfigLoot configLoot)
{
	LootTrackerItem[] items = new LootTrackerItem[configLoot.drops.length / 2];
	for (int i = 0; i < configLoot.drops.length; i += 2)
	{
		int id = configLoot.drops[i];
		int qty = configLoot.drops[i + 1];
		items[i >> 1] = buildLootTrackerItem(id, qty);
	}
	return new LootTrackerRecord(configLoot.name, "", configLoot.type, items, configLoot.kills);
}

/**
 * Is player currently within the provided map regions
 */
private boolean isPlayerWithinMapRegion(Set<Integer> definedMapRegions)
{
	final int[] mapRegions = client.getMapRegions();

	for (int region : mapRegions)
	{
		if (definedMapRegions.contains(region))
		{
			return true;
		}
	}

	return false;
}

// Queues a console chat message announcing the kill name and total GE value of the loot.
private void lootReceivedChatMessage(final Collection<ItemStack> items, final String name)
{
	long totalPrice = items.stream()
		.mapToLong(is -> (long) itemManager.getItemPrice(is.getId()) * is.getQuantity())
		.sum();

	final String message = new ChatMessageBuilder()
		.append(ChatColorType.HIGHLIGHT)
		.append("You've killed ")
		.append(name)
		.append(" for ")
		.append(QuantityFormatter.quantityToStackSize(totalPrice))
		.append(" loot.")
		.build();

	chatMessageManager.queue(
		QueuedMessage.builder()
			.type(ChatMessageType.CONSOLE)
			.runeLiteFormattedMessage(message)
			.build());
}

/**
 * Loads the stored loot totals for a (type, name) pair from the active profile,
 * or null when there is no profile or no stored entry.
 */
ConfigLoot getLootConfig(LootRecordType type, String name)
{
	String profile = profileKey;
	if (Strings.isNullOrEmpty(profile))
	{
		log.debug("Trying to get loot with no profile!");
		return null;
	}

	String json = configManager.getConfiguration(LootTrackerConfig.GROUP, profile, "drops_" + type + "_" + name);
	if (json == null)
	{
		return null;
	}

	return gson.fromJson(json, ConfigLoot.class);
}

// Serializes and stores loot totals for a (type, name) pair under the active profile.
void setLootConfig(LootRecordType type, String name, ConfigLoot loot)
{
	String profile = profileKey;
	if (Strings.isNullOrEmpty(profile))
	{
		log.debug("Trying to set loot with no profile!");
		return;
	}

	String json = gson.toJson(loot);
	configManager.setConfiguration(LootTrackerConfig.GROUP, profile, "drops_" + type + "_" + name, json);
}

// Removes the stored loot entry for a (type, name) pair from the active profile.
void removeLootConfig(LootRecordType type, String name)
{
	String profile = profileKey;
	if (Strings.isNullOrEmpty(profile))
	{
		log.debug("Trying to remove loot with no profile!");
		return;
	}

	configManager.unsetConfiguration(LootTrackerConfig.GROUP, profile, "drops_" + type + "_" + name);
}

// Removes every stored "drops_" loot entry from the active profile.
void removeAllLoot()
{
	String profile = profileKey;
	if (Strings.isNullOrEmpty(profile))
	{
		log.debug("Trying to clear loot with no profile!");
		return;
	}

	for (String key : configManager.getRSProfileConfigurationKeys(LootTrackerConfig.GROUP, profile, "drops_"))
	{
		configManager.unsetConfiguration(LootTrackerConfig.GROUP, profile, key);
	}
}
}
loottracker: add cache of runes
runelite-client/src/main/java/net/runelite/client/plugins/loottracker/LootTrackerPlugin.java
loottracker: add cache of runes
Java
mit
99f2f5ffdd4b35e4d9b15ccc6deb55714fd686ef
0
javache/react-native,pandiaraj44/react-native,janicduplessis/react-native,facebook/react-native,facebook/react-native,janicduplessis/react-native,javache/react-native,pandiaraj44/react-native,javache/react-native,pandiaraj44/react-native,javache/react-native,janicduplessis/react-native,myntra/react-native,janicduplessis/react-native,javache/react-native,myntra/react-native,myntra/react-native,pandiaraj44/react-native,facebook/react-native,pandiaraj44/react-native,facebook/react-native,myntra/react-native,facebook/react-native,janicduplessis/react-native,arthuralee/react-native,arthuralee/react-native,janicduplessis/react-native,janicduplessis/react-native,facebook/react-native,facebook/react-native,javache/react-native,arthuralee/react-native,arthuralee/react-native,javache/react-native,pandiaraj44/react-native,myntra/react-native,myntra/react-native,facebook/react-native,myntra/react-native,myntra/react-native,myntra/react-native,pandiaraj44/react-native,javache/react-native,facebook/react-native,javache/react-native,arthuralee/react-native,janicduplessis/react-native,pandiaraj44/react-native
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

package com.facebook.react.fabric.mounting;

import static com.facebook.infer.annotation.ThreadConfined.ANY;

import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import androidx.annotation.AnyThread;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
import com.facebook.common.logging.FLog;
import com.facebook.infer.annotation.Assertions;
import com.facebook.infer.annotation.ThreadConfined;
import com.facebook.react.bridge.ReactSoftException;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableNativeMap;
import com.facebook.react.bridge.RetryableMountingLayerException;
import com.facebook.react.bridge.SoftAssertions;
import com.facebook.react.bridge.UiThreadUtil;
import com.facebook.react.common.build.ReactBuildConfig;
import com.facebook.react.fabric.FabricUIManager;
import com.facebook.react.fabric.events.EventEmitterWrapper;
import com.facebook.react.fabric.mounting.mountitems.MountItem;
import com.facebook.react.touch.JSResponderHandler;
import com.facebook.react.uimanager.IllegalViewOperationException;
import com.facebook.react.uimanager.ReactStylesDiffMap;
import com.facebook.react.uimanager.RootView;
import com.facebook.react.uimanager.RootViewManager;
import com.facebook.react.uimanager.StateWrapper;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.ViewManager;
import com.facebook.react.uimanager.ViewManagerRegistry;
import com.facebook.yoga.YogaMeasureMode;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Class responsible for actually dispatching view updates enqueued via {@link
 * FabricUIManager#scheduleMountItems(int, MountItem[])} on the UI thread.
 */
public class MountingManager {
  public static final String TAG = MountingManager.class.getSimpleName();

  // Debug-only switch for dumping view hierarchies on every mutation.
  private static final boolean SHOW_CHANGED_VIEW_HIERARCHIES = ReactBuildConfig.DEBUG && false;

  // Registry of all views managed by Fabric, keyed by react tag.
  @NonNull private final ConcurrentHashMap<Integer, ViewState> mTagToViewState;
  @NonNull private final JSResponderHandler mJSResponderHandler = new JSResponderHandler();
  @NonNull private final ViewManagerRegistry mViewManagerRegistry;
  @NonNull private final RootViewManager mRootViewManager = new RootViewManager();

  public MountingManager(@NonNull ViewManagerRegistry viewManagerRegistry) {
    mTagToViewState = new ConcurrentHashMap<>();
    mViewManagerRegistry = viewManagerRegistry;
  }

  // Debug helper: logs the direct children of `parent` and, optionally, its ancestor chain.
  private static void logViewHierarchy(ViewGroup parent, boolean recurse) {
    int parentTag = parent.getId();
    FLog.e(TAG, " <ViewGroup tag=" + parentTag + " class=" + parent.getClass().toString() + ">");
    for (int i = 0; i < parent.getChildCount(); i++) {
      FLog.e(
          TAG,
          " <View idx="
              + i
              + " tag="
              + parent.getChildAt(i).getId()
              + " class="
              + parent.getChildAt(i).getClass().toString()
              + ">");
    }
    FLog.e(TAG, " </ViewGroup tag=" + parentTag + ">");

    if (recurse) {
      FLog.e(TAG, "Displaying Ancestors:");
      ViewParent ancestor = parent.getParent();
      while (ancestor != null) {
        ViewGroup ancestorViewGroup = (ancestor instanceof ViewGroup ? (ViewGroup) ancestor : null);
        int ancestorId = ancestorViewGroup == null ? View.NO_ID : ancestorViewGroup.getId();
        FLog.e(
            TAG, "<ViewParent tag=" + ancestorId + " class=" + ancestor.getClass().toString() + ">");
        ancestor = ancestor.getParent();
      }
    }
  }

  /**
   * This mutates the rootView, which is an Android View, so this should only be called on the UI
   * thread.
   *
   * @param reactRootTag
   * @param rootView
   */
  @AnyThread
  public void addRootView(final int reactRootTag, @NonNull final View rootView) {
    // Registration is done immediately; the id mutation is posted to the UI thread.
    mTagToViewState.put(
        reactRootTag, new ViewState(reactRootTag, rootView, mRootViewManager, true));
    UiThreadUtil.runOnUiThread(
        new Runnable() {
          @Override
          public void run() {
            if (rootView.getId() != View.NO_ID) {
              FLog.e(
                  TAG,
                  "Trying to add RootTag to RootView that already has a tag: existing tag: [%d] new tag: [%d]",
                  rootView.getId(),
                  reactRootTag);
              throw new IllegalViewOperationException(
                  "Trying to add a root view with an explicit id already set. React Native uses "
                      + "the id field to track react tags and will overwrite this field. If that is fine, "
                      + "explicitly overwrite the id field to View.NO_ID before calling addRootView.");
            }
            rootView.setId(reactRootTag);
          }
        });
  }

  /** Delete rootView and all children recursively. */
  @UiThread
  public void deleteRootView(int reactRootTag) {
    ViewState rootViewState = mTagToViewState.get(reactRootTag);
    if (rootViewState != null && rootViewState.mView != null) {
      dropView(rootViewState.mView, true);
    }
  }

  /** Releases all references to given native View. */
  @UiThread
  private void dropView(@NonNull View view, boolean deleteImmediately) {
    UiThreadUtil.assertOnUiThread();

    final int reactTag = view.getId();
    ViewState state = getViewState(reactTag);
    ViewManager viewManager = state.mViewManager;

    if (!state.mIsRoot && viewManager != null) {
      // For non-root views we notify viewmanager with {@link ViewManager#onDropInstance}
      viewManager.onDropViewInstance(view);
    }
    if (view instanceof ViewGroup && viewManager instanceof ViewGroupManager) {
      final ViewGroup viewGroup = (ViewGroup) view;
      final ViewGroupManager<ViewGroup> viewGroupManager = getViewGroupManager(state);

      // As documented elsewhere, sometimes when a child is removed from a parent, that change
      // is not immediately available in the hierarchy until a future UI tick. This can cause
      // inconsistent child counts, etc, but it can _also_ cause us to drop views that shouldn't,
      // because they're removed from the parent but that change isn't immediately visible. So,
      // we do two things: 1) delay this logic until the next UI thread tick, 2) ignore children
      // who don't report the expected parent.
      // For most cases, we _do not_ want this logic to run, anyway, since it either means that we
      // don't have a correct set of MountingInstructions; or it means that we're tearing down an
      // entire screen, in which case we can safely delete everything immediately, not having
      // executed any remove instructions immediately before this.
      if (deleteImmediately) {
        dropChildren(reactTag, viewGroup, viewGroupManager);
      } else {
        UiThreadUtil.runOnUiThread(
            new Runnable() {
              @Override
              public void run() {
                dropChildren(reactTag, viewGroup, viewGroupManager);
              }
            });
      }
    }

    mTagToViewState.remove(reactTag);
  }

  // Recursively drops all children of `viewGroup` that are still registered, skipping
  // (but logging) children whose reported parent no longer matches `viewGroup`.
  @UiThread
  private void dropChildren(
      int reactTag,
      @NonNull ViewGroup viewGroup,
      @NonNull ViewGroupManager<ViewGroup> viewGroupManager) {
    for (int i = viewGroupManager.getChildCount(viewGroup) - 1; i >= 0; i--) {
      View child = viewGroupManager.getChildAt(viewGroup, i);
      if (getNullableViewState(child.getId()) != null) {
        if (SHOW_CHANGED_VIEW_HIERARCHIES) {
          FLog.e(
              TAG,
              "Automatically dropping view that is still attached to a parent being dropped. Parent: ["
                  + reactTag
                  + "] child: ["
                  + child.getId()
                  + "]");
        }
        ViewParent childParent = child.getParent();
        if (childParent == null || !childParent.equals(viewGroup)) {
          int childParentId =
              (childParent == null
                  ? -1
                  : (childParent instanceof ViewGroup ? ((ViewGroup) childParent).getId() : -1));
          FLog.e(
              TAG,
              "Recursively deleting children of ["
                  + reactTag
                  + "] but parent of child ["
                  + child.getId()
                  + "] is ["
                  + childParentId
                  + "]");
        } else {
          dropView(child, true);
        }
      }
      viewGroupManager.removeViewAt(viewGroup, i);
    }
  }

  /** Inserts the view with tag {@code tag} into the parent with tag {@code parentTag} at {@code index}. */
  @UiThread
  public void addViewAt(final int parentTag, final int tag, final int index) {
    UiThreadUtil.assertOnUiThread();
    ViewState parentViewState = getViewState(parentTag);
    if (!(parentViewState.mView instanceof ViewGroup)) {
      String message =
          "Unable to add a view into a view that is not a ViewGroup. ParentTag: "
              + parentTag
              + " - Tag: "
              + tag
              + " - Index: "
              + index;
      FLog.e(TAG, message);
      throw new IllegalStateException(message);
    }
    final ViewGroup parentView = (ViewGroup) parentViewState.mView;
    ViewState viewState = getViewState(tag);
    final View view = viewState.mView;
    if (view == null) {
      throw new IllegalStateException(
          "Unable to find view for viewState " + viewState + " and tag " + tag);
    }

    // Display children before inserting
    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      FLog.e(TAG, "addViewAt: [" + tag + "] -> [" + parentTag + "] idx: " + index + " BEFORE");
      logViewHierarchy(parentView, false);
    }

    try {
      getViewGroupManager(parentViewState).addView(parentView, view, index);
    } catch (IllegalStateException e) {
      // Wrap error with more context for debugging
      throw new IllegalStateException(
          "addViewAt: failed to insert view ["
              + tag
              + "] into parent ["
              + parentTag
              + "] at index "
              + index,
          e);
    }

    // Display children after inserting
    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      UiThreadUtil.runOnUiThread(
          new Runnable() {
            @Override
            public void run() {
              FLog.e(
                  TAG, "addViewAt: [" + tag + "] -> [" + parentTag + "] idx: " + index + " AFTER");
              logViewHierarchy(parentView, false);
            }
          });
    }
  }

  // Looks up the ViewState for a tag; throws a retryable exception when absent.
  @NonNull
  private ViewState getViewState(int tag) {
    ViewState viewState = mTagToViewState.get(tag);
    if (viewState == null) {
      throw new RetryableMountingLayerException("Unable to find viewState view for tag " + tag);
    }
    return viewState;
  }

  public boolean getViewExists(int tag) {
    return mTagToViewState.get(tag) != null;
  }

  private @Nullable ViewState getNullableViewState(int tag) {
    return mTagToViewState.get(tag);
  }

  /** @deprecated integer-command variant; prefer {@link #receiveCommand(int, String, ReadableArray)}. */
  @Deprecated
  public void receiveCommand(int reactTag, int commandId, @Nullable ReadableArray commandArgs) {
    ViewState viewState = getNullableViewState(reactTag);

    // It's not uncommon for JS to send events as/after a component is being removed from the
    // view hierarchy. For example, TextInput may send a "blur" command in response to the view
    // disappearing. Throw `ReactNoCrashSoftException` so they're logged but don't crash in dev
    // for now.
    if (viewState == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState for tag: " + reactTag + " for commandId: " + commandId);
    }

    if (viewState.mViewManager == null) {
      throw new RetryableMountingLayerException("Unable to find viewManager for tag " + reactTag);
    }

    if (viewState.mView == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState view for tag " + reactTag);
    }

    viewState.mViewManager.receiveCommand(viewState.mView, commandId, commandArgs);
  }

  /** Dispatches a string-identified view command to the view with the given tag. */
  public void receiveCommand(
      int reactTag, @NonNull String commandId, @Nullable ReadableArray commandArgs) {
    ViewState viewState = getNullableViewState(reactTag);

    // It's not uncommon for JS to send events as/after a component is being removed from the
    // view hierarchy. For example, TextInput may send a "blur" command in response to the view
    // disappearing. Throw `ReactNoCrashSoftException` so they're logged but don't crash in dev
    // for now.
    if (viewState == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState for tag: " + reactTag + " for commandId: " + commandId);
    }

    if (viewState.mViewManager == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState manager for tag " + reactTag);
    }

    if (viewState.mView == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState view for tag " + reactTag);
    }

    viewState.mViewManager.receiveCommand(viewState.mView, commandId, commandArgs);
  }

  /** Forwards an Android accessibility event to the view with the given tag. */
  public void sendAccessibilityEvent(int reactTag, int eventType) {
    ViewState viewState = getViewState(reactTag);

    if (viewState.mViewManager == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState manager for tag " + reactTag);
    }

    if (viewState.mView == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState view for tag " + reactTag);
    }

    viewState.mView.sendAccessibilityEvent(eventType);
  }

  @SuppressWarnings("unchecked") // prevents unchecked conversion warn of the <ViewGroup> type
  private static @NonNull ViewGroupManager<ViewGroup> getViewGroupManager(
      @NonNull ViewState viewState) {
    if (viewState.mViewManager == null) {
      throw new IllegalStateException("Unable to find ViewManager for view: " + viewState);
    }
    return (ViewGroupManager<ViewGroup>) viewState.mViewManager;
  }

  /**
   * Removes the child at {@code index} from the parent with tag {@code parentTag}, verifying that
   * the child at that index actually carries tag {@code tag} and recovering (with soft exceptions)
   * when the hierarchy has drifted from the expected state.
   */
  @UiThread
  public void removeViewAt(final int tag, final int parentTag, int index) {
    UiThreadUtil.assertOnUiThread();
    ViewState viewState = getNullableViewState(parentTag);

    if (viewState == null) {
      ReactSoftException.logSoftException(
          MountingManager.TAG,
          new IllegalStateException(
              "Unable to find viewState for tag: " + parentTag + " for removeViewAt"));
      return;
    }

    final ViewGroup parentView = (ViewGroup) viewState.mView;

    if (parentView == null) {
      throw new IllegalStateException("Unable to find view for tag " + parentTag);
    }

    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      // Display children before deleting any
      FLog.e(TAG, "removeViewAt: [" + tag + "] -> [" + parentTag + "] idx: " + index + " BEFORE");
      logViewHierarchy(parentView, false);
    }

    ViewGroupManager<ViewGroup> viewGroupManager = getViewGroupManager(viewState);

    // Verify that the view we're about to remove has the same tag we expect
    View view = viewGroupManager.getChildAt(parentView, index);
    int actualTag = (view != null ? view.getId() : -1);
    if (actualTag != tag) {
      int tagActualIndex = -1;
      int parentChildrenCount = parentView.getChildCount();
      for (int i = 0; i < parentChildrenCount; i++) {
        if (parentView.getChildAt(i).getId() == tag) {
          tagActualIndex = i;
          break;
        }
      }

      // TODO T74425739: previously, we did not do this check and `removeViewAt` would be executed
      // below, sometimes crashing there. *However*, interestingly enough, `removeViewAt` would not
      // complain if you removed views from an already-empty parent. This seems necessary currently
      // for certain ViewManagers that remove their own children - like BottomSheet?
      // This workaround seems not-great, but for now, we just return here for
      // backwards-compatibility. Essentially, if a view has already been removed from the
      // hierarchy, we treat it as a noop.
      if (tagActualIndex == -1) {
        FLog.e(
            TAG,
            "removeViewAt: ["
                + tag
                + "] -> ["
                + parentTag
                + "] @"
                + index
                + ": view already removed from parent! Children in parent: "
                + parentChildrenCount);
        return;
      }

      // Here we are guaranteed that the view is still in the View hierarchy, just
      // at a different index. In debug mode we'll crash here; in production, we'll remove
      // the child from the parent and move on.
      // This is an issue that is safely recoverable 95% of the time. If this allows corruption
      // of the view hierarchy and causes bugs or a crash after this point, there will be logs
      // indicating that this happened.
      // This is likely *only* necessary because of Fabric's LayoutAnimations implementation.
      // If we can fix the bug there, or remove the need for LayoutAnimation index adjustment
      // entirely, we can just throw this exception without regression user experience.
      logViewHierarchy(parentView, true);
      ReactSoftException.logSoftException(
          TAG,
          new IllegalStateException(
              "Tried to remove view ["
                  + tag
                  + "] of parent ["
                  + parentTag
                  + "] at index "
                  + index
                  + ", but got view tag "
                  + actualTag
                  + " - actual index of view: "
                  + tagActualIndex));
      index = tagActualIndex;
    }

    try {
      viewGroupManager.removeViewAt(parentView, index);
    } catch (RuntimeException e) {
      // Note: `getChildCount` may not always be accurate!
      // We don't currently have a good explanation other than, in situations where you
      // would empirically expect to see childCount > 0, the childCount is reported as 0.
      // This is likely due to a ViewManager overriding getChildCount or some other methods
      // in a way that is strictly incorrect, but potentially only visible here.
      // The failure mode is actually that in `removeViewAt`, a NullPointerException is
      // thrown when we try to perform an operation on a View that doesn't exist, and
      // is therefore null.
      // We try to add some extra diagnostics here, but we always try to remove the View
      // from the hierarchy first because detecting by looking at childCount will not work.
      //
      // Note that the lesson here is that `getChildCount` is not /required/ to adhere to
      // any invariants. If you add 9 children to a parent, the `getChildCount` of the parent
      // may not be equal to 9. This apparently causes no issues with Android and is common
      // enough that we shouldn't try to change this invariant, without a lot of thought.
      int childCount = viewGroupManager.getChildCount(parentView);

      logViewHierarchy(parentView, true);

      throw new IllegalStateException(
          "Cannot remove child at index "
              + index
              + " from parent ViewGroup ["
              + parentView.getId()
              + "], only "
              + childCount
              + " children in parent. Warning: childCount may be incorrect!",
          e);
    }

    // Display children after deleting any
    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      final int finalIndex = index;
      UiThreadUtil.runOnUiThread(
          new Runnable() {
            @Override
            public void run() {
              FLog.e(
                  TAG,
                  "removeViewAt: ["
                      + tag
                      + "] -> ["
                      + parentTag
                      + "] idx: "
                      + finalIndex
                      + " AFTER");
              logViewHierarchy(parentView, false);
            }
          });
    }
  }

  /**
   * Creates and registers a view for the given component. Non-layoutable components get a
   * ViewState with no Android View; the props/state snapshot is stored either way.
   * Idempotent: a second create for an already-registered tag is a no-op.
   */
  @UiThread
  public void createView(
      @NonNull ThemedReactContext themedReactContext,
      @NonNull String componentName,
      int reactTag,
      @Nullable ReadableMap props,
      @Nullable StateWrapper stateWrapper,
      boolean isLayoutable) {
    if (getNullableViewState(reactTag) != null) {
      return;
    }

    View view = null;
    ViewManager viewManager = null;

    ReactStylesDiffMap propsDiffMap = null;
    if (props != null) {
      propsDiffMap = new ReactStylesDiffMap(props);
    }

    if (isLayoutable) {
      viewManager = mViewManagerRegistry.get(componentName);
      // View Managers are responsible for dealing with initial state and props.
      view =
          viewManager.createView(
              themedReactContext, propsDiffMap, stateWrapper, mJSResponderHandler);
      view.setId(reactTag);
    }

    ViewState viewState = new ViewState(reactTag, view, viewManager);
    viewState.mCurrentProps = propsDiffMap;
    viewState.mCurrentState = (stateWrapper != null ? stateWrapper.getState() : null);

    mTagToViewState.put(reactTag, viewState);
  }

  /** Applies a new props map to the view with the given tag via its ViewManager. */
  @UiThread
  public void updateProps(int reactTag, @Nullable ReadableMap props) {
    if (props == null) {
      return;
    }

    UiThreadUtil.assertOnUiThread();
    ViewState viewState = getViewState(reactTag);
    viewState.mCurrentProps = new ReactStylesDiffMap(props);
    View view = viewState.mView;

    if (view == null) {
      throw new IllegalStateException("Unable to find view for tag " + reactTag);
    }

    Assertions.assertNotNull(viewState.mViewManager)
        .updateProperties(view, viewState.mCurrentProps);
  }

  /** Measures (EXACTLY) and lays out the view with the given tag; root views are skipped. */
  @UiThread
  public void updateLayout(int reactTag, int x, int y, int width, int height) {
    UiThreadUtil.assertOnUiThread();

    ViewState viewState = getViewState(reactTag);
    // Do not layout Root Views
    if (viewState.mIsRoot) {
      return;
    }

    View viewToUpdate = viewState.mView;
    if (viewToUpdate == null) {
      throw new IllegalStateException("Unable to find View for tag: " + reactTag);
    }

    viewToUpdate.measure(
        View.MeasureSpec.makeMeasureSpec(width, View.MeasureSpec.EXACTLY),
        View.MeasureSpec.makeMeasureSpec(height, View.MeasureSpec.EXACTLY));

    ViewParent parent = viewToUpdate.getParent();
    if (parent instanceof RootView) {
      parent.requestLayout();
    }

    // TODO: T31905686 Check if the parent of the view has to layout the view, or the child has
    // to lay itself out. see NativeViewHierarchyManager.updateLayout
    viewToUpdate.layout(x, y, x + width, y + height);
  }

  /** Applies padding to the view with the given tag via its ViewManager; root views are skipped. */
  @UiThread
  public void updatePadding(int reactTag, int left, int top, int right, int bottom) {
    UiThreadUtil.assertOnUiThread();

    ViewState viewState = getViewState(reactTag);
    // Do not layout Root Views
    if (viewState.mIsRoot) {
      return;
    }

    View viewToUpdate = viewState.mView;
    if (viewToUpdate == null) {
      throw new IllegalStateException("Unable to find View for tag: " + reactTag);
    }

    ViewManager viewManager = viewState.mViewManager;
    if (viewManager == null) {
      throw new IllegalStateException("Unable to find ViewManager for view: " + viewState);
    }

    //noinspection unchecked
    viewManager.setPadding(viewToUpdate, left, top, right, bottom);
  }

  /** Unregisters the view with the given tag; logs a soft exception when it is unknown. */
  @UiThread
  public void deleteView(int reactTag) {
    UiThreadUtil.assertOnUiThread();
    ViewState viewState = getNullableViewState(reactTag);

    if (viewState == null) {
      ReactSoftException.logSoftException(
          MountingManager.TAG,
          new IllegalStateException(
              "Unable to find viewState for tag: " + reactTag + " for deleteView"));
      return;
    }

    // To delete we simply remove the tag from the registry.
    // In the past we called dropView here, but we want to rely on either
    // (1) the correct set of MountInstructions being sent to the platform
    // and/or (2) dropView being called by stopSurface.
    // If Views are orphaned at this stage and leaked, it's a problem in
    // the differ or LayoutAnimations, not MountingManager.
    // Additionally, as documented in `dropView`, we cannot always trust a
    // view's children to be up-to-date.
mTagToViewState.remove(reactTag); // For non-root views we notify viewmanager with {@link ViewManager#onDropInstance} ViewManager viewManager = viewState.mViewManager; if (!viewState.mIsRoot && viewManager != null) { viewManager.onDropViewInstance(viewState.mView); } } @UiThread public void updateState(final int reactTag, @Nullable StateWrapper stateWrapper) { UiThreadUtil.assertOnUiThread(); ViewState viewState = getViewState(reactTag); @Nullable ReadableNativeMap newState = stateWrapper == null ? null : stateWrapper.getState(); viewState.mCurrentState = newState; ViewManager viewManager = viewState.mViewManager; if (viewManager == null) { throw new IllegalStateException("Unable to find ViewManager for tag: " + reactTag); } Object extraData = viewManager.updateState(viewState.mView, viewState.mCurrentProps, stateWrapper); if (extraData != null) { viewManager.updateExtraData(viewState.mView, extraData); } } @UiThread public void preallocateView( @NonNull ThemedReactContext reactContext, String componentName, int reactTag, @Nullable ReadableMap props, @Nullable StateWrapper stateWrapper, boolean isLayoutable) { if (getNullableViewState(reactTag) != null) { throw new IllegalStateException( "View for component " + componentName + " with tag " + reactTag + " already exists."); } createView(reactContext, componentName, reactTag, props, stateWrapper, isLayoutable); } @UiThread public void updateEventEmitter(int reactTag, @NonNull EventEmitterWrapper eventEmitter) { UiThreadUtil.assertOnUiThread(); ViewState viewState = mTagToViewState.get(reactTag); if (viewState == null) { // TODO T62717437 - Use a flag to determine that these event emitters belong to virtual nodes // only. viewState = new ViewState(reactTag, null, null); mTagToViewState.put(reactTag, viewState); } viewState.mEventEmitter = eventEmitter; } /** * Set the JS responder for the view associated with the tags received as a parameter. 
* * <p>The JSResponder coordinates the return values of the onInterceptTouch method in Android * Views. This allows JS to coordinate when a touch should be handled by JS or by the Android * native views. See {@link JSResponderHandler} for more details. * * <p>This method is going to be executed on the UIThread as soon as it is delivered from JS to * RN. * * <p>Currently, there is no warranty that the view associated with the react tag exists, because * this method is not handled by the react commit process. * * @param reactTag React tag of the first parent of the view that is NOT virtual * @param initialReactTag React tag of the JS view that initiated the touch operation * @param blockNativeResponder If native responder should be blocked or not */ @UiThread public synchronized void setJSResponder( int reactTag, int initialReactTag, boolean blockNativeResponder) { if (!blockNativeResponder) { mJSResponderHandler.setJSResponder(initialReactTag, null); return; } ViewState viewState = getViewState(reactTag); View view = viewState.mView; if (initialReactTag != reactTag && view instanceof ViewParent) { // In this case, initialReactTag corresponds to a virtual/layout-only View, and we already // have a parent of that View in reactTag, so we can use it. mJSResponderHandler.setJSResponder(initialReactTag, (ViewParent) view); return; } else if (view == null) { SoftAssertions.assertUnreachable("Cannot find view for tag " + reactTag + "."); return; } if (viewState.mIsRoot) { SoftAssertions.assertUnreachable( "Cannot block native responder on " + reactTag + " that is a root view"); } mJSResponderHandler.setJSResponder(initialReactTag, view.getParent()); } /** * Clears the JS Responder specified by {@link #setJSResponder(int, int, boolean)}. After this * method is called, all the touch events are going to be handled by JS. 
*/ @UiThread public void clearJSResponder() { mJSResponderHandler.clearJSResponder(); } @AnyThread public long measure( @NonNull Context context, @NonNull String componentName, @NonNull ReadableMap localData, @NonNull ReadableMap props, @NonNull ReadableMap state, float width, @NonNull YogaMeasureMode widthMode, float height, @NonNull YogaMeasureMode heightMode, @Nullable float[] attachmentsPositions) { return mViewManagerRegistry .get(componentName) .measure( context, localData, props, state, width, widthMode, height, heightMode, attachmentsPositions); } @AnyThread @ThreadConfined(ANY) public @Nullable EventEmitterWrapper getEventEmitter(int reactTag) { ViewState viewState = getNullableViewState(reactTag); return viewState == null ? null : viewState.mEventEmitter; } /** * This class holds view state for react tags. Objects of this class are stored into the {@link * #mTagToViewState}, and they should be updated in the same thread. */ private static class ViewState { @Nullable final View mView; final int mReactTag; final boolean mIsRoot; @Nullable final ViewManager mViewManager; @Nullable public ReactStylesDiffMap mCurrentProps = null; @Nullable public ReadableMap mCurrentLocalData = null; @Nullable public ReadableMap mCurrentState = null; @Nullable public EventEmitterWrapper mEventEmitter = null; private ViewState(int reactTag, @Nullable View view, @Nullable ViewManager viewManager) { this(reactTag, view, viewManager, false); } private ViewState(int reactTag, @Nullable View view, ViewManager viewManager, boolean isRoot) { mReactTag = reactTag; mView = view; mIsRoot = isRoot; mViewManager = viewManager; } @Override public String toString() { boolean isLayoutOnly = mViewManager == null; return "ViewState [" + mReactTag + "] - isRoot: " + mIsRoot + " - props: " + mCurrentProps + " - localData: " + mCurrentLocalData + " - viewManager: " + mViewManager + " - isLayoutOnly: " + isLayoutOnly; } } }
ReactAndroid/src/main/java/com/facebook/react/fabric/mounting/MountingManager.java
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

package com.facebook.react.fabric.mounting;

import static com.facebook.infer.annotation.ThreadConfined.ANY;

import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import androidx.annotation.AnyThread;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
import com.facebook.common.logging.FLog;
import com.facebook.infer.annotation.Assertions;
import com.facebook.infer.annotation.ThreadConfined;
import com.facebook.react.bridge.ReactSoftException;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableNativeMap;
import com.facebook.react.bridge.RetryableMountingLayerException;
import com.facebook.react.bridge.SoftAssertions;
import com.facebook.react.bridge.UiThreadUtil;
import com.facebook.react.common.build.ReactBuildConfig;
import com.facebook.react.fabric.FabricUIManager;
import com.facebook.react.fabric.events.EventEmitterWrapper;
import com.facebook.react.fabric.mounting.mountitems.MountItem;
import com.facebook.react.touch.JSResponderHandler;
import com.facebook.react.uimanager.IllegalViewOperationException;
import com.facebook.react.uimanager.ReactStylesDiffMap;
import com.facebook.react.uimanager.RootView;
import com.facebook.react.uimanager.RootViewManager;
import com.facebook.react.uimanager.StateWrapper;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.ViewManager;
import com.facebook.react.uimanager.ViewManagerRegistry;
import com.facebook.yoga.YogaMeasureMode;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Class responsible for actually dispatching view updates enqueued via {@link
 * FabricUIManager#scheduleMountItems(int, MountItem[])} on the UI thread.
 */
public class MountingManager {
  public static final String TAG = MountingManager.class.getSimpleName();

  private static final boolean SHOW_CHANGED_VIEW_HIERARCHIES = ReactBuildConfig.DEBUG && false;

  // Registry of all known views, keyed by react tag. Concurrent because reads/writes may come
  // from multiple threads even though mutation of the Views themselves is UI-thread only.
  @NonNull private final ConcurrentHashMap<Integer, ViewState> mTagToViewState;
  @NonNull private final JSResponderHandler mJSResponderHandler = new JSResponderHandler();
  @NonNull private final ViewManagerRegistry mViewManagerRegistry;
  @NonNull private final RootViewManager mRootViewManager = new RootViewManager();

  public MountingManager(@NonNull ViewManagerRegistry viewManagerRegistry) {
    mTagToViewState = new ConcurrentHashMap<>();
    mViewManagerRegistry = viewManagerRegistry;
  }

  /** Debug helper: logs the direct children of {@code parent} and, optionally, its ancestors. */
  private static void logViewHierarchy(ViewGroup parent, boolean recurse) {
    int parentTag = parent.getId();
    FLog.e(TAG, " <ViewGroup tag=" + parentTag + " class=" + parent.getClass().toString() + ">");
    for (int i = 0; i < parent.getChildCount(); i++) {
      FLog.e(
          TAG,
          " <View idx="
              + i
              + " tag="
              + parent.getChildAt(i).getId()
              + " class="
              + parent.getChildAt(i).getClass().toString()
              + ">");
    }
    FLog.e(TAG, " </ViewGroup tag=" + parentTag + ">");

    if (recurse) {
      FLog.e(TAG, "Displaying Ancestors:");
      ViewParent ancestor = parent.getParent();
      while (ancestor != null) {
        ViewGroup ancestorViewGroup = (ancestor instanceof ViewGroup ? (ViewGroup) ancestor : null);
        int ancestorId = ancestorViewGroup == null ? View.NO_ID : ancestorViewGroup.getId();
        FLog.e(
            TAG,
            "<ViewParent tag=" + ancestorId + " class=" + ancestor.getClass().toString() + ">");
        ancestor = ancestor.getParent();
      }
    }
  }

  /**
   * This mutates the rootView, which is an Android View, so this should only be called on the UI
   * thread.
   *
   * @param reactRootTag
   * @param rootView
   */
  @AnyThread
  public void addRootView(final int reactRootTag, @NonNull final View rootView) {
    mTagToViewState.put(
        reactRootTag, new ViewState(reactRootTag, rootView, mRootViewManager, true));

    UiThreadUtil.runOnUiThread(
        new Runnable() {
          @Override
          public void run() {
            if (rootView.getId() != View.NO_ID) {
              FLog.e(
                  TAG,
                  "Trying to add RootTag to RootView that already has a tag: existing tag: [%d] new tag: [%d]",
                  rootView.getId(),
                  reactRootTag);
              throw new IllegalViewOperationException(
                  "Trying to add a root view with an explicit id already set. React Native uses "
                      + "the id field to track react tags and will overwrite this field. If that is fine, "
                      + "explicitly overwrite the id field to View.NO_ID before calling addRootView.");
            }
            rootView.setId(reactRootTag);
          }
        });
  }

  /**
   * Delete rootView and all children.
   *
   * <p>Looks the ViewState up exactly once: {@code mTagToViewState} is a ConcurrentHashMap, so a
   * separate containsKey-then-get pair could race with a concurrent removal of the entry between
   * the two calls and NPE on the {@code get} result.
   */
  @UiThread
  public void deleteRootView(int reactRootTag) {
    ViewState viewState = getNullableViewState(reactRootTag);
    if (viewState != null) {
      dropView(viewState.mView, true);
    }
  }

  /** Releases all references to given native View. */
  @UiThread
  private void dropView(@NonNull View view, boolean deleteImmediately) {
    UiThreadUtil.assertOnUiThread();

    final int reactTag = view.getId();
    ViewState state = getViewState(reactTag);
    ViewManager viewManager = state.mViewManager;

    if (!state.mIsRoot && viewManager != null) {
      // For non-root views we notify viewmanager with {@link ViewManager#onDropInstance}
      viewManager.onDropViewInstance(view);
    }
    if (view instanceof ViewGroup && viewManager instanceof ViewGroupManager) {
      final ViewGroup viewGroup = (ViewGroup) view;
      final ViewGroupManager<ViewGroup> viewGroupManager = getViewGroupManager(state);

      // As documented elsewhere, sometimes when a child is removed from a parent, that change
      // is not immediately available in the hierarchy until a future UI tick. This can cause
      // inconsistent child counts, etc, but it can _also_ cause us to drop views that shouldn't,
      // because they're removed from the parent but that change isn't immediately visible. So,
      // we do two things: 1) delay this logic until the next UI thread tick, 2) ignore children
      // who don't report the expected parent.
      // For most cases, we _do not_ want this logic to run, anyway, since it either means that we
      // don't have a correct set of MountingInstructions; or it means that we're tearing down an
      // entire screen, in which case we can safely delete everything immediately, not having
      // executed any remove instructions immediately before this.
      if (deleteImmediately) {
        dropChildren(reactTag, viewGroup, viewGroupManager);
      } else {
        UiThreadUtil.runOnUiThread(
            new Runnable() {
              @Override
              public void run() {
                dropChildren(reactTag, viewGroup, viewGroupManager);
              }
            });
      }
    }

    mTagToViewState.remove(reactTag);
  }

  /** Drops (and detaches) every child of {@code viewGroup} that is still known to the registry. */
  @UiThread
  private void dropChildren(
      int reactTag,
      @NonNull ViewGroup viewGroup,
      @NonNull ViewGroupManager<ViewGroup> viewGroupManager) {
    for (int i = viewGroupManager.getChildCount(viewGroup) - 1; i >= 0; i--) {
      View child = viewGroupManager.getChildAt(viewGroup, i);
      if (getNullableViewState(child.getId()) != null) {
        if (SHOW_CHANGED_VIEW_HIERARCHIES) {
          FLog.e(
              TAG,
              "Automatically dropping view that is still attached to a parent being dropped. Parent: ["
                  + reactTag
                  + "] child: ["
                  + child.getId()
                  + "]");
        }
        // Only recursively drop children that still report this ViewGroup as their parent;
        // see the comment in dropView for why parent pointers may be temporarily stale.
        ViewParent childParent = child.getParent();
        if (childParent == null || !childParent.equals(viewGroup)) {
          int childParentId =
              (childParent == null
                  ? -1
                  : (childParent instanceof ViewGroup ? ((ViewGroup) childParent).getId() : -1));
          FLog.e(
              TAG,
              "Recursively deleting children of ["
                  + reactTag
                  + "] but parent of child ["
                  + child.getId()
                  + "] is ["
                  + childParentId
                  + "]");
        } else {
          dropView(child, true);
        }
      }
      viewGroupManager.removeViewAt(viewGroup, i);
    }
  }

  /** Inserts the view identified by {@code tag} into the parent {@code parentTag} at {@code index}. */
  @UiThread
  public void addViewAt(final int parentTag, final int tag, final int index) {
    UiThreadUtil.assertOnUiThread();
    ViewState parentViewState = getViewState(parentTag);
    if (!(parentViewState.mView instanceof ViewGroup)) {
      String message =
          "Unable to add a view into a view that is not a ViewGroup. ParentTag: "
              + parentTag
              + " - Tag: "
              + tag
              + " - Index: "
              + index;
      FLog.e(TAG, message);
      throw new IllegalStateException(message);
    }
    final ViewGroup parentView = (ViewGroup) parentViewState.mView;
    ViewState viewState = getViewState(tag);
    final View view = viewState.mView;
    if (view == null) {
      throw new IllegalStateException(
          "Unable to find view for viewState " + viewState + " and tag " + tag);
    }

    // Display children before inserting
    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      FLog.e(TAG, "addViewAt: [" + tag + "] -> [" + parentTag + "] idx: " + index + " BEFORE");
      logViewHierarchy(parentView, false);
    }

    try {
      getViewGroupManager(parentViewState).addView(parentView, view, index);
    } catch (IllegalStateException e) {
      // Wrap error with more context for debugging
      throw new IllegalStateException(
          "addViewAt: failed to insert view ["
              + tag
              + "] into parent ["
              + parentTag
              + "] at index "
              + index,
          e);
    }

    // Display children after inserting
    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      UiThreadUtil.runOnUiThread(
          new Runnable() {
            @Override
            public void run() {
              FLog.e(
                  TAG, "addViewAt: [" + tag + "] -> [" + parentTag + "] idx: " + index + " AFTER");
              logViewHierarchy(parentView, false);
            }
          });
    }
  }

  /** Returns the ViewState for {@code tag}, throwing a retryable exception if it is unknown. */
  @NonNull
  private ViewState getViewState(int tag) {
    ViewState viewState = mTagToViewState.get(tag);
    if (viewState == null) {
      throw new RetryableMountingLayerException("Unable to find viewState view for tag " + tag);
    }
    return viewState;
  }

  /** Returns true if a ViewState is registered for {@code tag}. */
  public boolean getViewExists(int tag) {
    return mTagToViewState.get(tag) != null;
  }

  private @Nullable ViewState getNullableViewState(int tag) {
    return mTagToViewState.get(tag);
  }

  /** Dispatches an integer-identified command to the view's ViewManager. */
  @Deprecated
  public void receiveCommand(int reactTag, int commandId, @Nullable ReadableArray commandArgs) {
    ViewState viewState = getNullableViewState(reactTag);

    // It's not uncommon for JS to send events as/after a component is being removed from the
    // view hierarchy. For example, TextInput may send a "blur" command in response to the view
    // disappearing. Throw `ReactNoCrashSoftException` so they're logged but don't crash in dev
    // for now.
    if (viewState == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState for tag: " + reactTag + " for commandId: " + commandId);
    }

    if (viewState.mViewManager == null) {
      throw new RetryableMountingLayerException("Unable to find viewManager for tag " + reactTag);
    }

    if (viewState.mView == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState view for tag " + reactTag);
    }

    viewState.mViewManager.receiveCommand(viewState.mView, commandId, commandArgs);
  }

  /** Dispatches a string-identified command to the view's ViewManager. */
  public void receiveCommand(
      int reactTag, @NonNull String commandId, @Nullable ReadableArray commandArgs) {
    ViewState viewState = getNullableViewState(reactTag);

    // It's not uncommon for JS to send events as/after a component is being removed from the
    // view hierarchy. For example, TextInput may send a "blur" command in response to the view
    // disappearing. Throw `ReactNoCrashSoftException` so they're logged but don't crash in dev
    // for now.
    if (viewState == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState for tag: " + reactTag + " for commandId: " + commandId);
    }

    if (viewState.mViewManager == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState manager for tag " + reactTag);
    }

    if (viewState.mView == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState view for tag " + reactTag);
    }

    viewState.mViewManager.receiveCommand(viewState.mView, commandId, commandArgs);
  }

  /** Sends an Android accessibility event of {@code eventType} from the view for {@code reactTag}. */
  public void sendAccessibilityEvent(int reactTag, int eventType) {
    ViewState viewState = getViewState(reactTag);

    if (viewState.mViewManager == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState manager for tag " + reactTag);
    }

    if (viewState.mView == null) {
      throw new RetryableMountingLayerException(
          "Unable to find viewState view for tag " + reactTag);
    }

    viewState.mView.sendAccessibilityEvent(eventType);
  }

  @SuppressWarnings("unchecked") // prevents unchecked conversion warn of the <ViewGroup> type
  private static @NonNull ViewGroupManager<ViewGroup> getViewGroupManager(
      @NonNull ViewState viewState) {
    if (viewState.mViewManager == null) {
      throw new IllegalStateException("Unable to find ViewManager for view: " + viewState);
    }
    return (ViewGroupManager<ViewGroup>) viewState.mViewManager;
  }

  /** Removes the child with react tag {@code tag} from parent {@code parentTag} at {@code index}. */
  @UiThread
  public void removeViewAt(final int tag, final int parentTag, int index) {
    UiThreadUtil.assertOnUiThread();
    ViewState viewState = getNullableViewState(parentTag);

    if (viewState == null) {
      ReactSoftException.logSoftException(
          MountingManager.TAG,
          new IllegalStateException(
              "Unable to find viewState for tag: " + parentTag + " for removeViewAt"));
      return;
    }

    final ViewGroup parentView = (ViewGroup) viewState.mView;

    if (parentView == null) {
      throw new IllegalStateException("Unable to find view for tag " + parentTag);
    }

    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      // Display children before deleting any
      FLog.e(TAG, "removeViewAt: [" + tag + "] -> [" + parentTag + "] idx: " + index + " BEFORE");
      logViewHierarchy(parentView, false);
    }

    ViewGroupManager<ViewGroup> viewGroupManager = getViewGroupManager(viewState);

    // Verify that the view we're about to remove has the same tag we expect
    View view = viewGroupManager.getChildAt(parentView, index);
    int actualTag = (view != null ? view.getId() : -1);
    if (actualTag != tag) {
      int tagActualIndex = -1;
      int parentChildrenCount = parentView.getChildCount();
      for (int i = 0; i < parentChildrenCount; i++) {
        if (parentView.getChildAt(i).getId() == tag) {
          tagActualIndex = i;
          break;
        }
      }

      // TODO T74425739: previously, we did not do this check and `removeViewAt` would be executed
      // below, sometimes crashing there. *However*, interestingly enough, `removeViewAt` would not
      // complain if you removed views from an already-empty parent. This seems necessary currently
      // for certain ViewManagers that remove their own children - like BottomSheet?
      // This workaround seems not-great, but for now, we just return here for
      // backwards-compatibility. Essentially, if a view has already been removed from the
      // hierarchy, we treat it as a noop.
      if (tagActualIndex == -1) {
        FLog.e(
            TAG,
            "removeViewAt: ["
                + tag
                + "] -> ["
                + parentTag
                + "] @"
                + index
                + ": view already removed from parent! Children in parent: "
                + parentChildrenCount);
        return;
      }

      // Here we are guaranteed that the view is still in the View hierarchy, just
      // at a different index. In debug mode we'll crash here; in production, we'll remove
      // the child from the parent and move on.
      // This is an issue that is safely recoverable 95% of the time. If this allows corruption
      // of the view hierarchy and causes bugs or a crash after this point, there will be logs
      // indicating that this happened.
      // This is likely *only* necessary because of Fabric's LayoutAnimations implementation.
      // If we can fix the bug there, or remove the need for LayoutAnimation index adjustment
      // entirely, we can just throw this exception without regression user experience.
      logViewHierarchy(parentView, true);
      ReactSoftException.logSoftException(
          TAG,
          new IllegalStateException(
              "Tried to remove view ["
                  + tag
                  + "] of parent ["
                  + parentTag
                  + "] at index "
                  + index
                  + ", but got view tag "
                  + actualTag
                  + " - actual index of view: "
                  + tagActualIndex));
      index = tagActualIndex;
    }

    try {
      viewGroupManager.removeViewAt(parentView, index);
    } catch (RuntimeException e) {
      // Note: `getChildCount` may not always be accurate!
      // We don't currently have a good explanation other than, in situations where you
      // would empirically expect to see childCount > 0, the childCount is reported as 0.
      // This is likely due to a ViewManager overriding getChildCount or some other methods
      // in a way that is strictly incorrect, but potentially only visible here.
      // The failure mode is actually that in `removeViewAt`, a NullPointerException is
      // thrown when we try to perform an operation on a View that doesn't exist, and
      // is therefore null.
      // We try to add some extra diagnostics here, but we always try to remove the View
      // from the hierarchy first because detecting by looking at childCount will not work.
      //
      // Note that the lesson here is that `getChildCount` is not /required/ to adhere to
      // any invariants. If you add 9 children to a parent, the `getChildCount` of the parent
      // may not be equal to 9. This apparently causes no issues with Android and is common
      // enough that we shouldn't try to change this invariant, without a lot of thought.
      int childCount = viewGroupManager.getChildCount(parentView);

      logViewHierarchy(parentView, true);

      throw new IllegalStateException(
          "Cannot remove child at index "
              + index
              + " from parent ViewGroup ["
              + parentView.getId()
              + "], only "
              + childCount
              + " children in parent. Warning: childCount may be incorrect!",
          e);
    }

    // Display children after deleting any
    if (SHOW_CHANGED_VIEW_HIERARCHIES) {
      final int finalIndex = index;
      UiThreadUtil.runOnUiThread(
          new Runnable() {
            @Override
            public void run() {
              FLog.e(
                  TAG,
                  "removeViewAt: ["
                      + tag
                      + "] -> ["
                      + parentTag
                      + "] idx: "
                      + finalIndex
                      + " AFTER");
              logViewHierarchy(parentView, false);
            }
          });
    }
  }

  /**
   * Creates (if layoutable) and registers a view for {@code reactTag}. A no-op when the tag is
   * already registered.
   */
  @UiThread
  public void createView(
      @NonNull ThemedReactContext themedReactContext,
      @NonNull String componentName,
      int reactTag,
      @Nullable ReadableMap props,
      @Nullable StateWrapper stateWrapper,
      boolean isLayoutable) {
    if (getNullableViewState(reactTag) != null) {
      return;
    }

    View view = null;
    ViewManager viewManager = null;

    ReactStylesDiffMap propsDiffMap = null;
    if (props != null) {
      propsDiffMap = new ReactStylesDiffMap(props);
    }

    if (isLayoutable) {
      viewManager = mViewManagerRegistry.get(componentName);
      // View Managers are responsible for dealing with initial state and props.
      view =
          viewManager.createView(
              themedReactContext, propsDiffMap, stateWrapper, mJSResponderHandler);
      view.setId(reactTag);
    }

    ViewState viewState = new ViewState(reactTag, view, viewManager);
    viewState.mCurrentProps = propsDiffMap;
    viewState.mCurrentState = (stateWrapper != null ? stateWrapper.getState() : null);

    mTagToViewState.put(reactTag, viewState);
  }

  /** Applies {@code props} to the registered view for {@code reactTag} via its ViewManager. */
  @UiThread
  public void updateProps(int reactTag, @Nullable ReadableMap props) {
    if (props == null) {
      return;
    }

    UiThreadUtil.assertOnUiThread();
    ViewState viewState = getViewState(reactTag);
    viewState.mCurrentProps = new ReactStylesDiffMap(props);
    View view = viewState.mView;

    if (view == null) {
      throw new IllegalStateException("Unable to find view for tag " + reactTag);
    }

    Assertions.assertNotNull(viewState.mViewManager)
        .updateProperties(view, viewState.mCurrentProps);
  }

  /** Measures and positions the view for {@code reactTag}; root views are left untouched. */
  @UiThread
  public void updateLayout(int reactTag, int x, int y, int width, int height) {
    UiThreadUtil.assertOnUiThread();

    ViewState viewState = getViewState(reactTag);
    // Do not layout Root Views
    if (viewState.mIsRoot) {
      return;
    }

    View viewToUpdate = viewState.mView;
    if (viewToUpdate == null) {
      throw new IllegalStateException("Unable to find View for tag: " + reactTag);
    }

    viewToUpdate.measure(
        View.MeasureSpec.makeMeasureSpec(width, View.MeasureSpec.EXACTLY),
        View.MeasureSpec.makeMeasureSpec(height, View.MeasureSpec.EXACTLY));

    ViewParent parent = viewToUpdate.getParent();
    if (parent instanceof RootView) {
      parent.requestLayout();
    }

    // TODO: T31905686 Check if the parent of the view has to layout the view, or the child has
    // to lay itself out. see NativeViewHierarchyManager.updateLayout
    viewToUpdate.layout(x, y, x + width, y + height);
  }

  /** Applies padding to the view for {@code reactTag}; root views are left untouched. */
  @UiThread
  public void updatePadding(int reactTag, int left, int top, int right, int bottom) {
    UiThreadUtil.assertOnUiThread();

    ViewState viewState = getViewState(reactTag);
    // Do not layout Root Views
    if (viewState.mIsRoot) {
      return;
    }

    View viewToUpdate = viewState.mView;
    if (viewToUpdate == null) {
      throw new IllegalStateException("Unable to find View for tag: " + reactTag);
    }

    ViewManager viewManager = viewState.mViewManager;
    if (viewManager == null) {
      throw new IllegalStateException("Unable to find ViewManager for view: " + viewState);
    }

    //noinspection unchecked
    viewManager.setPadding(viewToUpdate, left, top, right, bottom);
  }

  /** Unregisters the view for {@code reactTag} and notifies its ViewManager of the drop. */
  @UiThread
  public void deleteView(int reactTag) {
    UiThreadUtil.assertOnUiThread();
    ViewState viewState = getNullableViewState(reactTag);

    if (viewState == null) {
      ReactSoftException.logSoftException(
          MountingManager.TAG,
          new IllegalStateException(
              "Unable to find viewState for tag: " + reactTag + " for deleteView"));
      return;
    }

    // To delete we simply remove the tag from the registry.
    // In the past we called dropView here, but we want to rely on either
    // (1) the correct set of MountInstructions being sent to the platform
    // and/or (2) dropView being called by stopSurface.
    // If Views are orphaned at this stage and leaked, it's a problem in
    // the differ or LayoutAnimations, not MountingManager.
    // Additionally, as documented in `dropView`, we cannot always trust a
    // view's children to be up-to-date.
    mTagToViewState.remove(reactTag);

    // For non-root views we notify viewmanager with {@link ViewManager#onDropInstance}
    ViewManager viewManager = viewState.mViewManager;
    if (!viewState.mIsRoot && viewManager != null) {
      viewManager.onDropViewInstance(viewState.mView);
    }
  }

  /** Pushes new Fabric state to the view for {@code reactTag} through its ViewManager. */
  @UiThread
  public void updateState(final int reactTag, @Nullable StateWrapper stateWrapper) {
    UiThreadUtil.assertOnUiThread();
    ViewState viewState = getViewState(reactTag);
    @Nullable ReadableNativeMap newState = stateWrapper == null ? null : stateWrapper.getState();

    viewState.mCurrentState = newState;

    ViewManager viewManager = viewState.mViewManager;

    if (viewManager == null) {
      throw new IllegalStateException("Unable to find ViewManager for tag: " + reactTag);
    }
    Object extraData =
        viewManager.updateState(viewState.mView, viewState.mCurrentProps, stateWrapper);
    if (extraData != null) {
      viewManager.updateExtraData(viewState.mView, extraData);
    }
  }

  /**
   * Eagerly creates a view for {@code reactTag} before it is mounted. Unlike {@link #createView},
   * this throws when the tag is already registered.
   */
  @UiThread
  public void preallocateView(
      @NonNull ThemedReactContext reactContext,
      String componentName,
      int reactTag,
      @Nullable ReadableMap props,
      @Nullable StateWrapper stateWrapper,
      boolean isLayoutable) {

    if (getNullableViewState(reactTag) != null) {
      throw new IllegalStateException(
          "View for component " + componentName + " with tag " + reactTag + " already exists.");
    }

    createView(reactContext, componentName, reactTag, props, stateWrapper, isLayoutable);
  }

  /** Attaches {@code eventEmitter} to the ViewState for {@code reactTag}, creating one if needed. */
  @UiThread
  public void updateEventEmitter(int reactTag, @NonNull EventEmitterWrapper eventEmitter) {
    UiThreadUtil.assertOnUiThread();
    ViewState viewState = mTagToViewState.get(reactTag);
    if (viewState == null) {
      // TODO T62717437 - Use a flag to determine that these event emitters belong to virtual nodes
      // only.
      viewState = new ViewState(reactTag, null, null);
      mTagToViewState.put(reactTag, viewState);
    }
    viewState.mEventEmitter = eventEmitter;
  }

  /**
   * Set the JS responder for the view associated with the tags received as a parameter.
   *
   * <p>The JSResponder coordinates the return values of the onInterceptTouch method in Android
   * Views. This allows JS to coordinate when a touch should be handled by JS or by the Android
   * native views. See {@link JSResponderHandler} for more details.
   *
   * <p>This method is going to be executed on the UIThread as soon as it is delivered from JS to
   * RN.
   *
   * <p>Currently, there is no warranty that the view associated with the react tag exists, because
   * this method is not handled by the react commit process.
   *
   * @param reactTag React tag of the first parent of the view that is NOT virtual
   * @param initialReactTag React tag of the JS view that initiated the touch operation
   * @param blockNativeResponder If native responder should be blocked or not
   */
  @UiThread
  public synchronized void setJSResponder(
      int reactTag, int initialReactTag, boolean blockNativeResponder) {
    if (!blockNativeResponder) {
      mJSResponderHandler.setJSResponder(initialReactTag, null);
      return;
    }

    ViewState viewState = getViewState(reactTag);
    View view = viewState.mView;
    if (initialReactTag != reactTag && view instanceof ViewParent) {
      // In this case, initialReactTag corresponds to a virtual/layout-only View, and we already
      // have a parent of that View in reactTag, so we can use it.
      mJSResponderHandler.setJSResponder(initialReactTag, (ViewParent) view);
      return;
    } else if (view == null) {
      SoftAssertions.assertUnreachable("Cannot find view for tag " + reactTag + ".");
      return;
    }

    if (viewState.mIsRoot) {
      SoftAssertions.assertUnreachable(
          "Cannot block native responder on " + reactTag + " that is a root view");
    }
    mJSResponderHandler.setJSResponder(initialReactTag, view.getParent());
  }

  /**
   * Clears the JS Responder specified by {@link #setJSResponder(int, int, boolean)}. After this
   * method is called, all the touch events are going to be handled by JS.
   */
  @UiThread
  public void clearJSResponder() {
    mJSResponderHandler.clearJSResponder();
  }

  /** Delegates measurement of a component to its ViewManager; safe to call from any thread. */
  @AnyThread
  public long measure(
      @NonNull Context context,
      @NonNull String componentName,
      @NonNull ReadableMap localData,
      @NonNull ReadableMap props,
      @NonNull ReadableMap state,
      float width,
      @NonNull YogaMeasureMode widthMode,
      float height,
      @NonNull YogaMeasureMode heightMode,
      @Nullable float[] attachmentsPositions) {

    return mViewManagerRegistry
        .get(componentName)
        .measure(
            context,
            localData,
            props,
            state,
            width,
            widthMode,
            height,
            heightMode,
            attachmentsPositions);
  }

  /** Returns the event emitter registered for {@code reactTag}, or null if none is known. */
  @AnyThread
  @ThreadConfined(ANY)
  public @Nullable EventEmitterWrapper getEventEmitter(int reactTag) {
    ViewState viewState = getNullableViewState(reactTag);
    return viewState == null ? null : viewState.mEventEmitter;
  }

  /**
   * This class holds view state for react tags. Objects of this class are stored into the {@link
   * #mTagToViewState}, and they should be updated in the same thread.
   */
  private static class ViewState {
    @Nullable final View mView;
    final int mReactTag;
    final boolean mIsRoot;
    @Nullable final ViewManager mViewManager;
    @Nullable public ReactStylesDiffMap mCurrentProps = null;
    @Nullable public ReadableMap mCurrentLocalData = null;
    @Nullable public ReadableMap mCurrentState = null;
    @Nullable public EventEmitterWrapper mEventEmitter = null;

    private ViewState(int reactTag, @Nullable View view, @Nullable ViewManager viewManager) {
      this(reactTag, view, viewManager, false);
    }

    private ViewState(int reactTag, @Nullable View view, ViewManager viewManager, boolean isRoot) {
      mReactTag = reactTag;
      mView = view;
      mIsRoot = isRoot;
      mViewManager = viewManager;
    }

    @Override
    public String toString() {
      boolean isLayoutOnly = mViewManager == null;
      return "ViewState ["
          + mReactTag
          + "] - isRoot: "
          + mIsRoot
          + " - props: "
          + mCurrentProps
          + " - localData: "
          + mCurrentLocalData
          + " - viewManager: "
          + mViewManager
          + " - isLayoutOnly: "
          + isLayoutOnly;
    }
  }
}
`deleteRootView`: use concurrent pattern with `mTagToViewState` Summary: Instead of doing a "containsKey then get", just get the rootViewTag and see if it's non-null. Theoretically, since it's a concurrent data-structure, it could be removed from the ConcurrentHashMap between "containsKey" returning true and the "get". This does not fix any known, existing problems. Changelog: [Internal] Reviewed By: mdvacca Differential Revision: D25378703 fbshipit-source-id: 62a44e68e4443dac5a557263cc4bb33de9eea993
ReactAndroid/src/main/java/com/facebook/react/fabric/mounting/MountingManager.java
`deleteRootView`: use concurrent pattern with `mTagToViewState`
Java
mit
c5762fa475bfd8ffc3fd66ebb0d246675b1d98d0
0
johnjohndoe/AntennaPod,drabux/AntennaPod,udif/AntennaPod,udif/AntennaPod,drabux/AntennaPod,domingos86/AntennaPod,TimB0/AntennaPod,TimB0/AntennaPod,mfietz/AntennaPod,twiceyuan/AntennaPod,domingos86/AntennaPod,johnjohndoe/AntennaPod,TimB0/AntennaPod,twiceyuan/AntennaPod,twiceyuan/AntennaPod,mfietz/AntennaPod,domingos86/AntennaPod,orelogo/AntennaPod,orelogo/AntennaPod,orelogo/AntennaPod,johnjohndoe/AntennaPod,orelogo/AntennaPod,drabux/AntennaPod,udif/AntennaPod,drabux/AntennaPod,domingos86/AntennaPod,johnjohndoe/AntennaPod,mfietz/AntennaPod,udif/AntennaPod,twiceyuan/AntennaPod,mfietz/AntennaPod,TimB0/AntennaPod
package de.danoeh.antennapod.activity; import android.annotation.TargetApi; import android.content.Intent; import android.content.SharedPreferences; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.PixelFormat; import android.media.AudioManager; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.support.v4.view.ViewCompat; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.CheckBox; import android.widget.ImageButton; import android.widget.SeekBar; import android.widget.SeekBar.OnSeekBarChangeListener; import android.widget.TextView; import android.widget.Toast; import com.afollestad.materialdialogs.MaterialDialog; import com.bumptech.glide.Glide; import com.joanzapata.iconify.IconDrawable; import com.joanzapata.iconify.fonts.FontAwesomeIcons; import de.danoeh.antennapod.R; import de.danoeh.antennapod.core.feed.FeedItem; import de.danoeh.antennapod.core.feed.FeedMedia; import de.danoeh.antennapod.core.preferences.UserPreferences; import de.danoeh.antennapod.core.service.playback.PlaybackService; import de.danoeh.antennapod.core.storage.DBReader; import de.danoeh.antennapod.core.storage.DBTasks; import de.danoeh.antennapod.core.storage.DBWriter; import de.danoeh.antennapod.core.util.Converter; import de.danoeh.antennapod.core.util.ShareUtils; import de.danoeh.antennapod.core.util.StorageUtils; import de.danoeh.antennapod.core.util.playback.MediaPlayerError; import de.danoeh.antennapod.core.util.playback.Playable; import de.danoeh.antennapod.core.util.playback.PlaybackController; import de.danoeh.antennapod.dialog.SleepTimerDialog; import de.danoeh.antennapod.dialog.VariableSpeedDialog; import rx.Observable; import rx.android.schedulers.AndroidSchedulers; 
import rx.schedulers.Schedulers; /** * Provides general features which are both needed for playing audio and video * files. */ public abstract class MediaplayerActivity extends AppCompatActivity implements OnSeekBarChangeListener { private static final String TAG = "MediaplayerActivity"; private static final String PREFS = "MediaPlayerActivityPreferences"; private static final String PREF_SHOW_TIME_LEFT = "showTimeLeft"; protected PlaybackController controller; protected TextView txtvPosition; protected TextView txtvLength; protected SeekBar sbPosition; protected Button butPlaybackSpeed; protected ImageButton butRev; protected TextView txtvRev; protected ImageButton butPlay; protected ImageButton butFF; protected TextView txtvFF; protected ImageButton butSkip; protected boolean showTimeLeft = false; private boolean isFavorite = false; private PlaybackController newPlaybackController() { return new PlaybackController(this, false) { @Override public void setupGUI() { MediaplayerActivity.this.setupGUI(); } @Override public void onPositionObserverUpdate() { MediaplayerActivity.this.onPositionObserverUpdate(); } @Override public void onBufferStart() { MediaplayerActivity.this.onBufferStart(); } @Override public void onBufferEnd() { MediaplayerActivity.this.onBufferEnd(); } @Override public void onBufferUpdate(float progress) { MediaplayerActivity.this.onBufferUpdate(progress); } @Override public void handleError(int code) { MediaplayerActivity.this.handleError(code); } @Override public void onReloadNotification(int code) { MediaplayerActivity.this.onReloadNotification(code); } @Override public void onSleepTimerUpdate() { supportInvalidateOptionsMenu(); } @Override public ImageButton getPlayButton() { return butPlay; } @Override public void postStatusMsg(int msg) { MediaplayerActivity.this.postStatusMsg(msg); } @Override public void clearStatusMsg() { MediaplayerActivity.this.clearStatusMsg(); } @Override public boolean loadMediaInfo() { return 
MediaplayerActivity.this.loadMediaInfo(); } @Override public void onAwaitingVideoSurface() { MediaplayerActivity.this.onAwaitingVideoSurface(); } @Override public void onServiceQueried() { MediaplayerActivity.this.onServiceQueried(); } @Override public void onShutdownNotification() { finish(); } @Override public void onPlaybackEnd() { finish(); } @Override public void onPlaybackSpeedChange() { MediaplayerActivity.this.onPlaybackSpeedChange(); } @Override protected void setScreenOn(boolean enable) { super.setScreenOn(enable); MediaplayerActivity.this.setScreenOn(enable); } }; } protected void onPlaybackSpeedChange() { updateButPlaybackSpeed(); } protected void onServiceQueried() { supportInvalidateOptionsMenu(); } protected void chooseTheme() { setTheme(UserPreferences.getTheme()); } protected void setScreenOn(boolean enable) { } @Override protected void onCreate(Bundle savedInstanceState) { chooseTheme(); super.onCreate(savedInstanceState); Log.d(TAG, "onCreate()"); StorageUtils.checkStorageAvailability(this); setVolumeControlStream(AudioManager.STREAM_MUSIC); orientation = getResources().getConfiguration().orientation; getWindow().setFormat(PixelFormat.TRANSPARENT); } @Override protected void onPause() { super.onPause(); controller.reinitServiceIfPaused(); controller.pause(); } /** * Should be used to switch to another player activity if the mime type is * not the correct one for the current activity. */ protected abstract void onReloadNotification(int notificationCode); /** * Should be used to inform the user that the PlaybackService is currently * buffering. */ protected abstract void onBufferStart(); /** * Should be used to hide the view that was showing the 'buffering'-message. */ protected abstract void onBufferEnd(); protected void onBufferUpdate(float progress) { if (sbPosition != null) { sbPosition.setSecondaryProgress((int) progress * sbPosition.getMax()); } } /** * Current screen orientation. 
*/ protected int orientation; @Override protected void onStart() { super.onStart(); if (controller != null) { controller.release(); } controller = newPlaybackController(); } @Override protected void onStop() { super.onStop(); Log.d(TAG, "onStop()"); if (controller != null) { controller.release(); } } @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onTrimMemory(int level) { super.onTrimMemory(level); Glide.get(this).trimMemory(level); } @Override public void onLowMemory() { super.onLowMemory(); Glide.get(this).clearMemory(); } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.mediaplayer, menu); return true; } @Override public boolean onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); if (controller == null) { return false; } Playable media = controller.getMedia(); menu.findItem(R.id.support_item).setVisible( media != null && media.getPaymentLink() != null && (media instanceof FeedMedia) && ((FeedMedia) media).getItem() != null && ((FeedMedia) media).getItem().getFlattrStatus().flattrable() ); boolean hasWebsiteLink = media != null && media.getWebsiteLink() != null; menu.findItem(R.id.visit_website_item).setVisible(hasWebsiteLink); boolean isItemAndHasLink = media != null && (media instanceof FeedMedia) && ((FeedMedia) media).getItem() != null && ((FeedMedia) media).getItem().getLink() != null; menu.findItem(R.id.share_link_item).setVisible(isItemAndHasLink); menu.findItem(R.id.share_link_with_position_item).setVisible(isItemAndHasLink); boolean isItemHasDownloadLink = media != null && (media instanceof FeedMedia) && ((FeedMedia) media).getDownload_url() != null; menu.findItem(R.id.share_download_url_item).setVisible(isItemHasDownloadLink); menu.findItem(R.id.share_download_url_with_position_item).setVisible(isItemHasDownloadLink); menu.findItem(R.id.share_item).setVisible(hasWebsiteLink || isItemAndHasLink || 
isItemHasDownloadLink); menu.findItem(R.id.add_to_favorites_item).setVisible(false); menu.findItem(R.id.remove_from_favorites_item).setVisible(false); if(media != null && media instanceof FeedMedia) { menu.findItem(R.id.add_to_favorites_item).setVisible(!isFavorite); menu.findItem(R.id.remove_from_favorites_item).setVisible(isFavorite); } boolean sleepTimerSet = controller.sleepTimerActive(); boolean sleepTimerNotSet = controller.sleepTimerNotActive(); menu.findItem(R.id.set_sleeptimer_item).setVisible(sleepTimerNotSet); menu.findItem(R.id.disable_sleeptimer_item).setVisible(sleepTimerSet); if (this instanceof AudioplayerActivity) { int[] attrs = {R.attr.action_bar_icon_color}; TypedArray ta = obtainStyledAttributes(UserPreferences.getTheme(), attrs); int textColor = ta.getColor(0, Color.GRAY); ta.recycle(); menu.findItem(R.id.audio_controls).setIcon(new IconDrawable(this, FontAwesomeIcons.fa_sliders).color(textColor).actionBarSize()); } else { menu.findItem(R.id.audio_controls).setVisible(false); } return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { if (controller == null) { return false; } Playable media = controller.getMedia(); if (item.getItemId() == android.R.id.home) { Intent intent = new Intent(MediaplayerActivity.this, MainActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); return true; } else { if (media != null) { switch (item.getItemId()) { case R.id.add_to_favorites_item: if(media instanceof FeedMedia) { FeedItem feedItem = ((FeedMedia)media).getItem(); if(feedItem != null) { DBWriter.addFavoriteItem(feedItem); isFavorite = true; invalidateOptionsMenu(); Toast.makeText(this, R.string.added_to_favorites, Toast.LENGTH_SHORT) .show(); } } break; case R.id.remove_from_favorites_item: if(media instanceof FeedMedia) { FeedItem feedItem = ((FeedMedia)media).getItem(); if(feedItem != null) { DBWriter.removeFavoriteItem(feedItem); isFavorite = false; 
invalidateOptionsMenu(); Toast.makeText(this, R.string.removed_from_favorites, Toast.LENGTH_SHORT) .show(); } } break; case R.id.disable_sleeptimer_item: if (controller.serviceAvailable()) { MaterialDialog.Builder stDialog = new MaterialDialog.Builder(this); stDialog.title(R.string.sleep_timer_label); stDialog.content(getString(R.string.time_left_label) + Converter.getDurationStringLong((int) controller .getSleepTimerTimeLeft())); stDialog.positiveText(R.string.disable_sleeptimer_label); stDialog.negativeText(R.string.cancel_label); stDialog.callback(new MaterialDialog.ButtonCallback() { @Override public void onPositive(MaterialDialog dialog) { dialog.dismiss(); controller.disableSleepTimer(); } @Override public void onNegative(MaterialDialog dialog) { dialog.dismiss(); } }); stDialog.build().show(); } break; case R.id.set_sleeptimer_item: if (controller.serviceAvailable()) { SleepTimerDialog td = new SleepTimerDialog(this) { @Override public void onTimerSet(long millis, boolean shakeToReset, boolean vibrate) { controller.setSleepTimer(millis, shakeToReset, vibrate); } }; td.createNewDialog().show(); } break; case R.id.audio_controls: MaterialDialog dialog = new MaterialDialog.Builder(this) .title(R.string.audio_controls) .customView(R.layout.audio_controls, true) .neutralText(R.string.close_label) .onNeutral((dialog1, which) -> { final SeekBar left = (SeekBar) dialog1.findViewById(R.id.volume_left); final SeekBar right = (SeekBar) dialog1.findViewById(R.id.volume_right); UserPreferences.setVolume(left.getProgress(), right.getProgress()); }) .show(); final SeekBar barPlaybackSpeed = (SeekBar) dialog.findViewById(R.id.playback_speed); final Button butDecSpeed = (Button) dialog.findViewById(R.id.butDecSpeed); butDecSpeed.setOnClickListener(v -> { if(controller != null && controller.canSetPlaybackSpeed()) { barPlaybackSpeed.setProgress(barPlaybackSpeed.getProgress() - 2); } else { VariableSpeedDialog.showGetPluginDialog(this); } }); final Button butIncSpeed = (Button) 
dialog.findViewById(R.id.butIncSpeed); butIncSpeed.setOnClickListener(v -> { if(controller != null && controller.canSetPlaybackSpeed()) { barPlaybackSpeed.setProgress(barPlaybackSpeed.getProgress() + 2); } else { VariableSpeedDialog.showGetPluginDialog(this); } }); final TextView txtvPlaybackSpeed = (TextView) dialog.findViewById(R.id.txtvPlaybackSpeed); float currentSpeed = 1.0f; try { currentSpeed = Float.parseFloat(UserPreferences.getPlaybackSpeed()); } catch (NumberFormatException e) { Log.e(TAG, Log.getStackTraceString(e)); UserPreferences.setPlaybackSpeed(String.valueOf(currentSpeed)); } txtvPlaybackSpeed.setText(String.format("%.2fx", currentSpeed)); barPlaybackSpeed.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { if(controller != null && controller.canSetPlaybackSpeed()) { float playbackSpeed = (progress + 10) / 20.0f; controller.setPlaybackSpeed(playbackSpeed); String speed = String.format("%.2f", playbackSpeed); UserPreferences.setPlaybackSpeed(speed); txtvPlaybackSpeed.setText(speed + "x"); } else if(fromUser) { float speed = Float.valueOf(UserPreferences.getPlaybackSpeed()); barPlaybackSpeed.post(() -> { barPlaybackSpeed.setProgress((int) (20 * speed) - 10); }); } } @Override public void onStartTrackingTouch(SeekBar seekBar) { if(controller != null && !controller.canSetPlaybackSpeed()) { VariableSpeedDialog.showGetPluginDialog(MediaplayerActivity.this); } } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); barPlaybackSpeed.setProgress((int) (20 * currentSpeed) - 10); final SeekBar barLeftVolume = (SeekBar) dialog.findViewById(R.id.volume_left); barLeftVolume.setProgress(100); final SeekBar barRightVolume = (SeekBar) dialog.findViewById(R.id.volume_right); barRightVolume.setProgress(100); final CheckBox stereoToMono = (CheckBox) dialog.findViewById(R.id.stereo_to_mono); stereoToMono.setChecked(UserPreferences.stereoToMono()); if 
(controller != null && !controller.canDownmix()) { stereoToMono.setEnabled(false); String sonicOnly = getString(R.string.sonic_only); stereoToMono.setText(stereoToMono.getText() + " [" + sonicOnly + "]"); } barLeftVolume.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { float leftVolume = 1.0f, rightVolume = 1.0f; if (progress < 100) { leftVolume = progress / 100.0f; } if (barRightVolume.getProgress() < 100) { rightVolume = barRightVolume.getProgress() / 100.0f; } controller.setVolume(leftVolume, rightVolume); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); barRightVolume.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { float leftVolume = 1.0f, rightVolume = 1.0f; if (progress < 100) { rightVolume = progress / 100.0f; } if (barLeftVolume.getProgress() < 100) { leftVolume = barLeftVolume.getProgress() / 100.0f; } controller.setVolume(leftVolume, rightVolume); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); stereoToMono.setOnCheckedChangeListener((buttonView, isChecked) -> { UserPreferences.stereoToMono(isChecked); if (controller != null) { controller.setDownmix(isChecked); } }); break; case R.id.visit_website_item: Uri uri = Uri.parse(media.getWebsiteLink()); startActivity(new Intent(Intent.ACTION_VIEW, uri)); break; case R.id.support_item: if (media instanceof FeedMedia) { DBTasks.flattrItemIfLoggedIn(this, ((FeedMedia) media).getItem()); } break; case R.id.share_link_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemLink(this, ((FeedMedia) media).getItem()); } break; case R.id.share_download_url_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemDownloadLink(this, 
((FeedMedia) media).getItem()); } break; case R.id.share_link_with_position_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemLink(this, ((FeedMedia) media).getItem(), true); } break; case R.id.share_download_url_with_position_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemDownloadLink(this, ((FeedMedia) media).getItem(), true); } break; default: return false; } return true; } else { return false; } } } @Override protected void onResume() { super.onResume(); Log.d(TAG, "onResume()"); StorageUtils.checkStorageAvailability(this); controller.init(); } /** * Called by 'handleStatus()' when the PlaybackService is waiting for * a video surface. */ protected abstract void onAwaitingVideoSurface(); protected abstract void postStatusMsg(int resId); protected abstract void clearStatusMsg(); protected void onPositionObserverUpdate() { if (controller != null) { int currentPosition = controller.getPosition(); int duration = controller.getDuration(); Log.d(TAG, "currentPosition " + Converter .getDurationStringLong(currentPosition)); if (currentPosition != PlaybackService.INVALID_TIME && duration != PlaybackService.INVALID_TIME && controller.getMedia() != null) { txtvPosition.setText(Converter .getDurationStringLong(currentPosition)); if (showTimeLeft) { txtvLength.setText("-" + Converter .getDurationStringLong(duration - currentPosition)); } else { txtvLength.setText(Converter .getDurationStringLong(duration)); } updateProgressbarPosition(currentPosition, duration); } else { Log.w(TAG, "Could not react to position observer update because of invalid time"); } } } private void updateProgressbarPosition(int position, int duration) { Log.d(TAG, "updateProgressbarPosition(" + position + ", " + duration + ")"); float progress = ((float) position) / duration; sbPosition.setProgress((int) (progress * sbPosition.getMax())); } /** * Load information about the media that is going to be played or currently * being played. 
This method will be called when the activity is connected * to the PlaybackService to ensure that the activity has the right * FeedMedia object. */ protected boolean loadMediaInfo() { Log.d(TAG, "loadMediaInfo()"); Playable media = controller.getMedia(); SharedPreferences prefs = getSharedPreferences(PREFS, MODE_PRIVATE); showTimeLeft = prefs.getBoolean(PREF_SHOW_TIME_LEFT, false); if (media != null) { txtvPosition.setText(Converter.getDurationStringLong((media.getPosition()))); if (media.getDuration() != 0) { txtvLength.setText(Converter.getDurationStringLong(media.getDuration())); float progress = ((float) media.getPosition()) / media.getDuration(); sbPosition.setProgress((int) (progress * sbPosition.getMax())); if (showTimeLeft) { int timeLeft = media.getDuration() - media.getPosition(); txtvLength.setText("-" + Converter.getDurationStringLong(timeLeft)); } } checkFavorite(); if(butPlaybackSpeed != null) { if (controller == null) { butPlaybackSpeed.setVisibility(View.GONE); } else { butPlaybackSpeed.setVisibility(View.VISIBLE); if (controller.canSetPlaybackSpeed()) { ViewCompat.setAlpha(butPlaybackSpeed, 1.0f); } else { ViewCompat.setAlpha(butPlaybackSpeed, 0.5f); } } updateButPlaybackSpeed(); } return true; } else { return false; } } protected void setupGUI() { setContentView(getContentViewResourceId()); sbPosition = (SeekBar) findViewById(R.id.sbPosition); txtvPosition = (TextView) findViewById(R.id.txtvPosition); SharedPreferences prefs = getSharedPreferences(PREFS, MODE_PRIVATE); showTimeLeft = prefs.getBoolean(PREF_SHOW_TIME_LEFT, false); Log.d("timeleft", showTimeLeft ? 
"true" : "false"); txtvLength = (TextView) findViewById(R.id.txtvLength); txtvLength.setOnClickListener(v -> { showTimeLeft = !showTimeLeft; Playable media = controller.getMedia(); if (media == null) { return; } String length; if (showTimeLeft) { length = "-" + Converter.getDurationStringLong(media.getDuration() - media.getPosition()); } else { length = Converter.getDurationStringLong(media.getDuration()); } txtvLength.setText(length); SharedPreferences.Editor editor = prefs.edit(); editor.putBoolean(PREF_SHOW_TIME_LEFT, showTimeLeft); editor.apply(); Log.d("timeleft on click", showTimeLeft ? "true" : "false"); }); butPlaybackSpeed = (Button) findViewById(R.id.butPlaybackSpeed); butRev = (ImageButton) findViewById(R.id.butRev); txtvRev = (TextView) findViewById(R.id.txtvRev); if (txtvRev != null) { txtvRev.setText(String.valueOf(UserPreferences.getRewindSecs())); } butPlay = (ImageButton) findViewById(R.id.butPlay); butFF = (ImageButton) findViewById(R.id.butFF); txtvFF = (TextView) findViewById(R.id.txtvFF); if (txtvFF != null) { txtvFF.setText(String.valueOf(UserPreferences.getFastFowardSecs())); } butSkip = (ImageButton) findViewById(R.id.butSkip); // SEEKBAR SETUP sbPosition.setOnSeekBarChangeListener(this); // BUTTON SETUP if(butPlaybackSpeed != null) { butPlaybackSpeed.setOnClickListener(v -> { if (controller == null) { return; } if (controller.canSetPlaybackSpeed()) { String[] availableSpeeds = UserPreferences.getPlaybackSpeedArray(); String currentSpeed = UserPreferences.getPlaybackSpeed(); // Provide initial value in case the speed list has changed // out from under us // and our current speed isn't in the new list String newSpeed; if (availableSpeeds.length > 0) { newSpeed = availableSpeeds[0]; } else { newSpeed = "1.00"; } for (int i = 0; i < availableSpeeds.length; i++) { if (availableSpeeds[i].equals(currentSpeed)) { if (i == availableSpeeds.length - 1) { newSpeed = availableSpeeds[0]; } else { newSpeed = availableSpeeds[i + 1]; } break; } } 
UserPreferences.setPlaybackSpeed(newSpeed); controller.setPlaybackSpeed(Float.parseFloat(newSpeed)); } else { VariableSpeedDialog.showGetPluginDialog(this); } }); butPlaybackSpeed.setOnLongClickListener(v -> { VariableSpeedDialog.showDialog(this); return true; }); } if (butRev != null) { butRev.setOnClickListener(v -> { int curr = controller.getPosition(); controller.seekTo(curr - UserPreferences.getRewindSecs() * 1000); }); butRev.setOnLongClickListener(new View.OnLongClickListener() { int choice; @Override public boolean onLongClick(View v) { int checked = 0; int rewindSecs = UserPreferences.getRewindSecs(); final int[] values = getResources().getIntArray(R.array.seek_delta_values); final String[] choices = new String[values.length]; for (int i = 0; i < values.length; i++) { if (rewindSecs == values[i]) { checked = i; } choices[i] = String.valueOf(values[i]) + " " + getString(R.string.time_seconds); } choice = values[checked]; AlertDialog.Builder builder = new AlertDialog.Builder(MediaplayerActivity.this); builder.setTitle(R.string.pref_rewind); builder.setSingleChoiceItems(choices, checked, (dialog, which) -> { choice = values[which]; }); builder.setNegativeButton(R.string.cancel_label, null); builder.setPositiveButton(R.string.confirm_label, (dialog, which) -> { UserPreferences.setPrefRewindSecs(choice); if(txtvRev != null){ txtvRev.setText(String.valueOf(choice)); } }); builder.create().show(); return true; } }); } butPlay.setOnClickListener(controller.newOnPlayButtonClickListener()); if (butFF != null) { butFF.setOnClickListener(v -> { int curr = controller.getPosition(); controller.seekTo(curr + UserPreferences.getFastFowardSecs() * 1000); }); butFF.setOnLongClickListener(new View.OnLongClickListener() { int choice; @Override public boolean onLongClick(View v) { int checked = 0; int rewindSecs = UserPreferences.getFastFowardSecs(); final int[] values = getResources().getIntArray(R.array.seek_delta_values); final String[] choices = new String[values.length]; 
for (int i = 0; i < values.length; i++) { if (rewindSecs == values[i]) { checked = i; } choices[i] = String.valueOf(values[i]) + " " + getString(R.string.time_seconds); } choice = values[checked]; AlertDialog.Builder builder = new AlertDialog.Builder(MediaplayerActivity.this); builder.setTitle(R.string.pref_fast_forward); builder.setSingleChoiceItems(choices, checked, (dialog, which) -> { choice = values[which]; }); builder.setNegativeButton(R.string.cancel_label, null); builder.setPositiveButton(R.string.confirm_label, (dialog, which) -> { UserPreferences.setPrefFastForwardSecs(choice); if(txtvFF != null) { txtvFF.setText(String.valueOf(choice)); } }); builder.create().show(); return true; } }); } if (butSkip != null) { butSkip.setOnClickListener(v -> { sendBroadcast(new Intent(PlaybackService.ACTION_SKIP_CURRENT_EPISODE)); }); } } protected abstract int getContentViewResourceId(); void handleError(int errorCode) { final AlertDialog.Builder errorDialog = new AlertDialog.Builder(this); errorDialog.setTitle(R.string.error_label); errorDialog .setMessage(MediaPlayerError.getErrorString(this, errorCode)); errorDialog.setNeutralButton("OK", (dialog, which) -> { dialog.dismiss(); finish(); } ); errorDialog.create().show(); } float prog; @Override public void onProgressChanged (SeekBar seekBar,int progress, boolean fromUser) { if (controller != null) { prog = controller.onSeekBarProgressChanged(seekBar, progress, fromUser, txtvPosition); if (showTimeLeft && prog != 0) { int duration = controller.getDuration(); String length = "-" + Converter.getDurationStringLong(duration - (int) (prog * duration)); txtvLength.setText(length); } } } private void updateButPlaybackSpeed() { if (controller != null && butPlaybackSpeed != null) { butPlaybackSpeed.setText(UserPreferences.getPlaybackSpeed() + "x"); } } @Override public void onStartTrackingTouch(SeekBar seekBar) { if (controller != null) { controller.onSeekBarStartTrackingTouch(seekBar); } } @Override public void 
onStopTrackingTouch(SeekBar seekBar) { if (controller != null) { controller.onSeekBarStopTrackingTouch(seekBar, prog); } } private void checkFavorite() { Playable playable = controller.getMedia(); if (playable != null && playable instanceof FeedMedia) { FeedItem feedItem = ((FeedMedia) playable).getItem(); if (feedItem != null) { Observable.fromCallable(() -> DBReader.getFeedItem(feedItem.getId())) .subscribeOn(Schedulers.newThread()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(item -> { boolean isFav = item.isTagged(FeedItem.TAG_FAVORITE); if(isFavorite != isFav) { isFavorite = isFav; invalidateOptionsMenu(); } }); } } } }
app/src/main/java/de/danoeh/antennapod/activity/MediaplayerActivity.java
package de.danoeh.antennapod.activity; import android.annotation.TargetApi; import android.content.Intent; import android.content.SharedPreferences; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.PixelFormat; import android.media.AudioManager; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.support.v4.view.ViewCompat; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.CheckBox; import android.widget.ImageButton; import android.widget.SeekBar; import android.widget.SeekBar.OnSeekBarChangeListener; import android.widget.TextView; import android.widget.Toast; import com.afollestad.materialdialogs.MaterialDialog; import com.bumptech.glide.Glide; import com.joanzapata.iconify.IconDrawable; import com.joanzapata.iconify.fonts.FontAwesomeIcons; import de.danoeh.antennapod.R; import de.danoeh.antennapod.core.feed.FeedItem; import de.danoeh.antennapod.core.feed.FeedMedia; import de.danoeh.antennapod.core.preferences.UserPreferences; import de.danoeh.antennapod.core.service.playback.PlaybackService; import de.danoeh.antennapod.core.storage.DBReader; import de.danoeh.antennapod.core.storage.DBTasks; import de.danoeh.antennapod.core.storage.DBWriter; import de.danoeh.antennapod.core.util.Converter; import de.danoeh.antennapod.core.util.ShareUtils; import de.danoeh.antennapod.core.util.StorageUtils; import de.danoeh.antennapod.core.util.playback.MediaPlayerError; import de.danoeh.antennapod.core.util.playback.Playable; import de.danoeh.antennapod.core.util.playback.PlaybackController; import de.danoeh.antennapod.dialog.SleepTimerDialog; import de.danoeh.antennapod.dialog.VariableSpeedDialog; import rx.Observable; import rx.android.schedulers.AndroidSchedulers; 
import rx.schedulers.Schedulers; /** * Provides general features which are both needed for playing audio and video * files. */ public abstract class MediaplayerActivity extends AppCompatActivity implements OnSeekBarChangeListener { private static final String TAG = "MediaplayerActivity"; private static final String PREFS = "MediaPlayerActivityPreferences"; private static final String PREF_SHOW_TIME_LEFT = "showTimeLeft"; protected PlaybackController controller; protected TextView txtvPosition; protected TextView txtvLength; protected SeekBar sbPosition; protected Button butPlaybackSpeed; protected ImageButton butRev; protected TextView txtvRev; protected ImageButton butPlay; protected ImageButton butFF; protected TextView txtvFF; protected ImageButton butSkip; protected boolean showTimeLeft = false; private boolean isFavorite = false; private PlaybackController newPlaybackController() { return new PlaybackController(this, false) { @Override public void setupGUI() { MediaplayerActivity.this.setupGUI(); } @Override public void onPositionObserverUpdate() { MediaplayerActivity.this.onPositionObserverUpdate(); } @Override public void onBufferStart() { MediaplayerActivity.this.onBufferStart(); } @Override public void onBufferEnd() { MediaplayerActivity.this.onBufferEnd(); } @Override public void onBufferUpdate(float progress) { MediaplayerActivity.this.onBufferUpdate(progress); } @Override public void handleError(int code) { MediaplayerActivity.this.handleError(code); } @Override public void onReloadNotification(int code) { MediaplayerActivity.this.onReloadNotification(code); } @Override public void onSleepTimerUpdate() { supportInvalidateOptionsMenu(); } @Override public ImageButton getPlayButton() { return butPlay; } @Override public void postStatusMsg(int msg) { MediaplayerActivity.this.postStatusMsg(msg); } @Override public void clearStatusMsg() { MediaplayerActivity.this.clearStatusMsg(); } @Override public boolean loadMediaInfo() { return 
MediaplayerActivity.this.loadMediaInfo(); } @Override public void onAwaitingVideoSurface() { MediaplayerActivity.this.onAwaitingVideoSurface(); } @Override public void onServiceQueried() { MediaplayerActivity.this.onServiceQueried(); } @Override public void onShutdownNotification() { finish(); } @Override public void onPlaybackEnd() { finish(); } @Override public void onPlaybackSpeedChange() { MediaplayerActivity.this.onPlaybackSpeedChange(); } @Override protected void setScreenOn(boolean enable) { super.setScreenOn(enable); MediaplayerActivity.this.setScreenOn(enable); } }; } protected void onPlaybackSpeedChange() { updateButPlaybackSpeed(); } protected void onServiceQueried() { supportInvalidateOptionsMenu(); } protected void chooseTheme() { setTheme(UserPreferences.getTheme()); } protected void setScreenOn(boolean enable) { } @Override protected void onCreate(Bundle savedInstanceState) { chooseTheme(); super.onCreate(savedInstanceState); Log.d(TAG, "onCreate()"); StorageUtils.checkStorageAvailability(this); setVolumeControlStream(AudioManager.STREAM_MUSIC); orientation = getResources().getConfiguration().orientation; getWindow().setFormat(PixelFormat.TRANSPARENT); } @Override protected void onPause() { super.onPause(); controller.reinitServiceIfPaused(); controller.pause(); } /** * Should be used to switch to another player activity if the mime type is * not the correct one for the current activity. */ protected abstract void onReloadNotification(int notificationCode); /** * Should be used to inform the user that the PlaybackService is currently * buffering. */ protected abstract void onBufferStart(); /** * Should be used to hide the view that was showing the 'buffering'-message. */ protected abstract void onBufferEnd(); protected void onBufferUpdate(float progress) { if (sbPosition != null) { sbPosition.setSecondaryProgress((int) progress * sbPosition.getMax()); } } /** * Current screen orientation. 
*/ protected int orientation; @Override protected void onStart() { super.onStart(); if (controller != null) { controller.release(); } controller = newPlaybackController(); } @Override protected void onStop() { super.onStop(); Log.d(TAG, "onStop()"); if (controller != null) { controller.release(); } } @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onTrimMemory(int level) { super.onTrimMemory(level); Glide.get(this).trimMemory(level); } @Override public void onLowMemory() { super.onLowMemory(); Glide.get(this).clearMemory(); } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.mediaplayer, menu); return true; } @Override public boolean onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); if (controller == null) { return false; } Playable media = controller.getMedia(); menu.findItem(R.id.support_item).setVisible( media != null && media.getPaymentLink() != null && (media instanceof FeedMedia) && ((FeedMedia) media).getItem() != null && ((FeedMedia) media).getItem().getFlattrStatus().flattrable() ); boolean hasWebsiteLink = media != null && media.getWebsiteLink() != null; menu.findItem(R.id.visit_website_item).setVisible(hasWebsiteLink); boolean isItemAndHasLink = media != null && (media instanceof FeedMedia) && ((FeedMedia) media).getItem() != null && ((FeedMedia) media).getItem().getLink() != null; menu.findItem(R.id.share_link_item).setVisible(isItemAndHasLink); menu.findItem(R.id.share_link_with_position_item).setVisible(isItemAndHasLink); boolean isItemHasDownloadLink = media != null && (media instanceof FeedMedia) && ((FeedMedia) media).getDownload_url() != null; menu.findItem(R.id.share_download_url_item).setVisible(isItemHasDownloadLink); menu.findItem(R.id.share_download_url_with_position_item).setVisible(isItemHasDownloadLink); menu.findItem(R.id.share_item).setVisible(hasWebsiteLink || isItemAndHasLink || 
isItemHasDownloadLink); menu.findItem(R.id.add_to_favorites_item).setVisible(false); menu.findItem(R.id.remove_from_favorites_item).setVisible(false); if(media != null && media instanceof FeedMedia) { menu.findItem(R.id.add_to_favorites_item).setVisible(!isFavorite); menu.findItem(R.id.remove_from_favorites_item).setVisible(isFavorite); } boolean sleepTimerSet = controller.sleepTimerActive(); boolean sleepTimerNotSet = controller.sleepTimerNotActive(); menu.findItem(R.id.set_sleeptimer_item).setVisible(sleepTimerNotSet); menu.findItem(R.id.disable_sleeptimer_item).setVisible(sleepTimerSet); if (this instanceof AudioplayerActivity) { int[] attrs = {R.attr.action_bar_icon_color}; TypedArray ta = obtainStyledAttributes(UserPreferences.getTheme(), attrs); int textColor = ta.getColor(0, Color.GRAY); ta.recycle(); menu.findItem(R.id.audio_controls).setIcon(new IconDrawable(this, FontAwesomeIcons.fa_sliders).color(textColor).actionBarSize()); } else { menu.findItem(R.id.audio_controls).setVisible(false); } return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { if (controller == null) { return false; } Playable media = controller.getMedia(); if (item.getItemId() == android.R.id.home) { Intent intent = new Intent(MediaplayerActivity.this, MainActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); return true; } else { if (media != null) { switch (item.getItemId()) { case R.id.add_to_favorites_item: if(media instanceof FeedMedia) { FeedItem feedItem = ((FeedMedia)media).getItem(); if(feedItem != null) { DBWriter.addFavoriteItem(feedItem); isFavorite = true; invalidateOptionsMenu(); Toast.makeText(this, R.string.added_to_favorites, Toast.LENGTH_SHORT) .show(); } } break; case R.id.remove_from_favorites_item: if(media instanceof FeedMedia) { FeedItem feedItem = ((FeedMedia)media).getItem(); if(feedItem != null) { DBWriter.removeFavoriteItem(feedItem); isFavorite = false; 
invalidateOptionsMenu(); Toast.makeText(this, R.string.removed_from_favorites, Toast.LENGTH_SHORT) .show(); } } break; case R.id.disable_sleeptimer_item: if (controller.serviceAvailable()) { MaterialDialog.Builder stDialog = new MaterialDialog.Builder(this); stDialog.title(R.string.sleep_timer_label); stDialog.content(getString(R.string.time_left_label) + Converter.getDurationStringLong((int) controller .getSleepTimerTimeLeft())); stDialog.positiveText(R.string.disable_sleeptimer_label); stDialog.negativeText(R.string.cancel_label); stDialog.callback(new MaterialDialog.ButtonCallback() { @Override public void onPositive(MaterialDialog dialog) { dialog.dismiss(); controller.disableSleepTimer(); } @Override public void onNegative(MaterialDialog dialog) { dialog.dismiss(); } }); stDialog.build().show(); } break; case R.id.set_sleeptimer_item: if (controller.serviceAvailable()) { SleepTimerDialog td = new SleepTimerDialog(this) { @Override public void onTimerSet(long millis, boolean shakeToReset, boolean vibrate) { controller.setSleepTimer(millis, shakeToReset, vibrate); } }; td.createNewDialog().show(); } break; case R.id.audio_controls: MaterialDialog dialog = new MaterialDialog.Builder(this) .title(R.string.audio_controls) .customView(R.layout.audio_controls, true) .neutralText(R.string.close_label) .onNeutral((dialog1, which) -> { final SeekBar left = (SeekBar) dialog1.findViewById(R.id.volume_left); final SeekBar right = (SeekBar) dialog1.findViewById(R.id.volume_right); UserPreferences.setVolume(left.getProgress(), right.getProgress()); }) .show(); final SeekBar barPlaybackSpeed = (SeekBar) dialog.findViewById(R.id.playback_speed); final Button butDecSpeed = (Button) dialog.findViewById(R.id.butDecSpeed); butDecSpeed.setOnClickListener(v -> { if(controller != null && controller.canSetPlaybackSpeed()) { barPlaybackSpeed.setProgress(barPlaybackSpeed.getProgress() - 2); } else { VariableSpeedDialog.showGetPluginDialog(this); } }); final Button butIncSpeed = (Button) 
dialog.findViewById(R.id.butIncSpeed); butIncSpeed.setOnClickListener(v -> { if(controller != null && controller.canSetPlaybackSpeed()) { barPlaybackSpeed.setProgress(barPlaybackSpeed.getProgress() + 2); } else { VariableSpeedDialog.showGetPluginDialog(this); } }); final TextView txtvPlaybackSpeed = (TextView) dialog.findViewById(R.id.txtvPlaybackSpeed); float currentSpeed = 1.0f; try { currentSpeed = Float.parseFloat(UserPreferences.getPlaybackSpeed()); } catch (NumberFormatException e) { Log.e(TAG, Log.getStackTraceString(e)); UserPreferences.setPlaybackSpeed(String.valueOf(currentSpeed)); } txtvPlaybackSpeed.setText(String.format("%.2fx", currentSpeed)); barPlaybackSpeed.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { if(controller != null && controller.canSetPlaybackSpeed()) { float playbackSpeed = (progress + 10) / 20.0f; controller.setPlaybackSpeed(playbackSpeed); String speed = String.format("%.2f", playbackSpeed); UserPreferences.setPlaybackSpeed(speed); txtvPlaybackSpeed.setText(speed + "x"); } else if(fromUser) { float speed = Float.valueOf(UserPreferences.getPlaybackSpeed()); barPlaybackSpeed.post(() -> { barPlaybackSpeed.setProgress((int) (20 * speed) - 10); }); } } @Override public void onStartTrackingTouch(SeekBar seekBar) { if(controller != null && !controller.canSetPlaybackSpeed()) { VariableSpeedDialog.showGetPluginDialog(MediaplayerActivity.this); } } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); barPlaybackSpeed.setProgress((int) (20 * currentSpeed) - 10); final SeekBar barLeftVolume = (SeekBar) dialog.findViewById(R.id.volume_left); barLeftVolume.setProgress(100); final SeekBar barRightVolume = (SeekBar) dialog.findViewById(R.id.volume_right); barRightVolume.setProgress(100); final CheckBox stereoToMono = (CheckBox) dialog.findViewById(R.id.stereo_to_mono); stereoToMono.setChecked(UserPreferences.stereoToMono()); if 
(controller != null && !controller.canDownmix()) { stereoToMono.setEnabled(false); String sonicOnly = getString(R.string.sonic_only); stereoToMono.setText(stereoToMono.getText() + " [" + sonicOnly + "]"); } barLeftVolume.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { float leftVolume = 1.0f, rightVolume = 1.0f; if (progress < 100) { leftVolume = progress / 100.0f; } if (barRightVolume.getProgress() < 100) { rightVolume = barRightVolume.getProgress() / 100.0f; } controller.setVolume(leftVolume, rightVolume); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); barRightVolume.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { float leftVolume = 1.0f, rightVolume = 1.0f; if (progress < 100) { rightVolume = progress / 100.0f; } if (barLeftVolume.getProgress() < 100) { leftVolume = barLeftVolume.getProgress() / 100.0f; } controller.setVolume(leftVolume, rightVolume); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); stereoToMono.setOnCheckedChangeListener((buttonView, isChecked) -> { UserPreferences.stereoToMono(isChecked); if (controller != null) { controller.setDownmix(isChecked); } }); break; case R.id.visit_website_item: Uri uri = Uri.parse(media.getWebsiteLink()); startActivity(new Intent(Intent.ACTION_VIEW, uri)); break; case R.id.support_item: if (media instanceof FeedMedia) { DBTasks.flattrItemIfLoggedIn(this, ((FeedMedia) media).getItem()); } break; case R.id.share_link_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemLink(this, ((FeedMedia) media).getItem()); } break; case R.id.share_download_url_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemDownloadLink(this, 
((FeedMedia) media).getItem()); } break; case R.id.share_link_with_position_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemLink(this, ((FeedMedia) media).getItem(), true); } break; case R.id.share_download_url_with_position_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemDownloadLink(this, ((FeedMedia) media).getItem(), true); } break; default: return false; } return true; } else { return false; } } } @Override protected void onResume() { super.onResume(); Log.d(TAG, "onResume()"); StorageUtils.checkStorageAvailability(this); controller.init(); } /** * Called by 'handleStatus()' when the PlaybackService is waiting for * a video surface. */ protected abstract void onAwaitingVideoSurface(); protected abstract void postStatusMsg(int resId); protected abstract void clearStatusMsg(); protected void onPositionObserverUpdate() { if (controller != null) { int currentPosition = controller.getPosition(); int duration = controller.getDuration(); Log.d(TAG, "currentPosition " + Converter .getDurationStringLong(currentPosition)); if (currentPosition != PlaybackService.INVALID_TIME && duration != PlaybackService.INVALID_TIME && controller.getMedia() != null) { txtvPosition.setText(Converter .getDurationStringLong(currentPosition)); if (showTimeLeft) { txtvLength.setText("-" + Converter .getDurationStringLong(duration - currentPosition)); } else { txtvLength.setText(Converter .getDurationStringLong(duration)); } updateProgressbarPosition(currentPosition, duration); } else { Log.w(TAG, "Could not react to position observer update because of invalid time"); } } } private void updateProgressbarPosition(int position, int duration) { Log.d(TAG, "updateProgressbarPosition(" + position + ", " + duration + ")"); float progress = ((float) position) / duration; sbPosition.setProgress((int) (progress * sbPosition.getMax())); } /** * Load information about the media that is going to be played or currently * being played. 
This method will be called when the activity is connected * to the PlaybackService to ensure that the activity has the right * FeedMedia object. */ protected boolean loadMediaInfo() { Log.d(TAG, "loadMediaInfo()"); Playable media = controller.getMedia(); SharedPreferences prefs = getSharedPreferences(PREFS, MODE_PRIVATE); showTimeLeft = prefs.getBoolean(PREF_SHOW_TIME_LEFT, false); if (media != null) { txtvPosition.setText(Converter.getDurationStringLong((media.getPosition()))); if (media.getDuration() != 0) { txtvLength.setText(Converter.getDurationStringLong(media.getDuration())); float progress = ((float) media.getPosition()) / media.getDuration(); sbPosition.setProgress((int) (progress * sbPosition.getMax())); if (showTimeLeft) { int timeLeft = media.getDuration() - media.getPosition(); txtvLength.setText("-" + Converter.getDurationStringLong(timeLeft)); } } checkFavorite(); if(butPlaybackSpeed != null) { if (controller == null) { butPlaybackSpeed.setVisibility(View.GONE); } else { butPlaybackSpeed.setVisibility(View.VISIBLE); if (controller.canSetPlaybackSpeed()) { ViewCompat.setAlpha(butPlaybackSpeed, 1.0f); } else { ViewCompat.setAlpha(butPlaybackSpeed, 0.5f); } } updateButPlaybackSpeed(); } return true; } else { return false; } } protected void setupGUI() { setContentView(getContentViewResourceId()); sbPosition = (SeekBar) findViewById(R.id.sbPosition); txtvPosition = (TextView) findViewById(R.id.txtvPosition); SharedPreferences prefs = getSharedPreferences(PREFS, MODE_PRIVATE); showTimeLeft = prefs.getBoolean(PREF_SHOW_TIME_LEFT, false); Log.d("timeleft", showTimeLeft ? 
"true" : "false"); txtvLength = (TextView) findViewById(R.id.txtvLength); txtvLength.setOnClickListener(v -> { showTimeLeft = !showTimeLeft; Playable media = controller.getMedia(); if (media == null) { return; } String length; if (showTimeLeft) { length = "-" + Converter.getDurationStringLong(media.getDuration() - media.getPosition()); } else { length = Converter.getDurationStringLong(media.getDuration()); } txtvLength.setText(length); SharedPreferences.Editor editor = prefs.edit(); editor.putBoolean(PREF_SHOW_TIME_LEFT, showTimeLeft); editor.apply(); Log.d("timeleft on click", showTimeLeft ? "true" : "false"); }); butPlaybackSpeed = (Button) findViewById(R.id.butPlaybackSpeed); butRev = (ImageButton) findViewById(R.id.butRev); txtvRev = (TextView) findViewById(R.id.txtvRev); if (txtvRev != null) { txtvRev.setText(String.valueOf(UserPreferences.getRewindSecs())); } butPlay = (ImageButton) findViewById(R.id.butPlay); butFF = (ImageButton) findViewById(R.id.butFF); txtvFF = (TextView) findViewById(R.id.txtvFF); if (txtvFF != null) { txtvFF.setText(String.valueOf(UserPreferences.getFastFowardSecs())); } butSkip = (ImageButton) findViewById(R.id.butSkip); // SEEKBAR SETUP sbPosition.setOnSeekBarChangeListener(this); // BUTTON SETUP if(butPlaybackSpeed != null) { butPlaybackSpeed.setOnClickListener(v -> { if (controller == null) { return; } if (controller.canSetPlaybackSpeed()) { String[] availableSpeeds = UserPreferences.getPlaybackSpeedArray(); String currentSpeed = UserPreferences.getPlaybackSpeed(); // Provide initial value in case the speed list has changed // out from under us // and our current speed isn't in the new list String newSpeed; if (availableSpeeds.length > 0) { newSpeed = availableSpeeds[0]; } else { newSpeed = "1.00"; } for (int i = 0; i < availableSpeeds.length; i++) { if (availableSpeeds[i].equals(currentSpeed)) { if (i == availableSpeeds.length - 1) { newSpeed = availableSpeeds[0]; } else { newSpeed = availableSpeeds[i + 1]; } break; } } 
UserPreferences.setPlaybackSpeed(newSpeed); controller.setPlaybackSpeed(Float.parseFloat(newSpeed)); } else { VariableSpeedDialog.showGetPluginDialog(this); } }); butPlaybackSpeed.setOnLongClickListener(v -> { VariableSpeedDialog.showDialog(this); return true; }); } if (butRev != null) { butRev.setOnClickListener(v -> { int curr = controller.getPosition(); controller.seekTo(curr - UserPreferences.getRewindSecs() * 1000); }); butRev.setOnLongClickListener(new View.OnLongClickListener() { int choice; @Override public boolean onLongClick(View v) { int checked = 0; int rewindSecs = UserPreferences.getRewindSecs(); final int[] values = getResources().getIntArray(R.array.seek_delta_values); final String[] choices = new String[values.length]; for (int i = 0; i < values.length; i++) { if (rewindSecs == values[i]) { checked = i; } choices[i] = String.valueOf(values[i]) + " " + getString(R.string.time_seconds); } choice = values[checked]; AlertDialog.Builder builder = new AlertDialog.Builder(MediaplayerActivity.this); builder.setTitle(R.string.pref_rewind); builder.setSingleChoiceItems(choices, checked, (dialog, which) -> { choice = values[which]; }); builder.setNegativeButton(R.string.cancel_label, null); builder.setPositiveButton(R.string.confirm_label, (dialog, which) -> { UserPreferences.setPrefRewindSecs(choice); txtvRev.setText(String.valueOf(choice)); }); builder.create().show(); return true; } }); } butPlay.setOnClickListener(controller.newOnPlayButtonClickListener()); if (butFF != null) { butFF.setOnClickListener(v -> { int curr = controller.getPosition(); controller.seekTo(curr + UserPreferences.getFastFowardSecs() * 1000); }); butFF.setOnLongClickListener(new View.OnLongClickListener() { int choice; @Override public boolean onLongClick(View v) { int checked = 0; int rewindSecs = UserPreferences.getFastFowardSecs(); final int[] values = getResources().getIntArray(R.array.seek_delta_values); final String[] choices = new String[values.length]; for (int i = 0; i < 
values.length; i++) { if (rewindSecs == values[i]) { checked = i; } choices[i] = String.valueOf(values[i]) + " " + getString(R.string.time_seconds); } choice = values[checked]; AlertDialog.Builder builder = new AlertDialog.Builder(MediaplayerActivity.this); builder.setTitle(R.string.pref_fast_forward); builder.setSingleChoiceItems(choices, checked, (dialog, which) -> { choice = values[which]; }); builder.setNegativeButton(R.string.cancel_label, null); builder.setPositiveButton(R.string.confirm_label, (dialog, which) -> { UserPreferences.setPrefFastForwardSecs(choice); txtvFF.setText(String.valueOf(choice)); }); builder.create().show(); return true; } }); } if (butSkip != null) { butSkip.setOnClickListener(v -> { sendBroadcast(new Intent(PlaybackService.ACTION_SKIP_CURRENT_EPISODE)); }); } } protected abstract int getContentViewResourceId(); void handleError(int errorCode) { final AlertDialog.Builder errorDialog = new AlertDialog.Builder(this); errorDialog.setTitle(R.string.error_label); errorDialog .setMessage(MediaPlayerError.getErrorString(this, errorCode)); errorDialog.setNeutralButton("OK", (dialog, which) -> { dialog.dismiss(); finish(); } ); errorDialog.create().show(); } float prog; @Override public void onProgressChanged (SeekBar seekBar,int progress, boolean fromUser) { if (controller != null) { prog = controller.onSeekBarProgressChanged(seekBar, progress, fromUser, txtvPosition); if (showTimeLeft && prog != 0) { int duration = controller.getDuration(); String length = "-" + Converter.getDurationStringLong(duration - (int) (prog * duration)); txtvLength.setText(length); } } } private void updateButPlaybackSpeed() { if (controller != null && butPlaybackSpeed != null) { butPlaybackSpeed.setText(UserPreferences.getPlaybackSpeed() + "x"); } } @Override public void onStartTrackingTouch(SeekBar seekBar) { if (controller != null) { controller.onSeekBarStartTrackingTouch(seekBar); } } @Override public void onStopTrackingTouch(SeekBar seekBar) { if (controller != 
null) { controller.onSeekBarStopTrackingTouch(seekBar, prog); } } private void checkFavorite() { Playable playable = controller.getMedia(); if (playable != null && playable instanceof FeedMedia) { FeedItem feedItem = ((FeedMedia) playable).getItem(); if (feedItem != null) { Observable.fromCallable(() -> DBReader.getFeedItem(feedItem.getId())) .subscribeOn(Schedulers.newThread()) .observeOn(AndroidSchedulers.mainThread()) .subscribe(item -> { boolean isFav = item.isTagged(FeedItem.TAG_FAVORITE); if(isFavorite != isFav) { isFavorite = isFav; invalidateOptionsMenu(); } }); } } } }
Don't NPE
app/src/main/java/de/danoeh/antennapod/activity/MediaplayerActivity.java
Don't NPE
Java
epl-1.0
d99173ff3e871d3114e094f49d833f4c38e1571b
0
bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs
/******************************************************************************* * Copyright (c) 1998, 2012 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 12/17/2010-2.2 Guy Pelletier * - 330755: Nested embeddables can't be used as embedded ids * 11/10/2011-2.4 Guy Pelletier * - 357474: Address primaryKey option from tenant discriminator column * 14/05/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications ******************************************************************************/ package org.eclipse.persistence.descriptors; import java.io.Serializable; import java.security.AccessController; import java.util.HashSet; import java.util.Set; import org.eclipse.persistence.annotations.CacheKeyType; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.ObjectReferenceMapping; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.mappings.foundation.AbstractColumnMapping; import org.eclipse.persistence.queries.UpdateObjectQuery; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.identitymaps.CacheId; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import 
org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.internal.sessions.AbstractSession; /** * <p> * <b>Description</b>: Place holder for CMP specific information. This class can be set on the ClassDescriptor. * * @see org.eclipse.persistence.descriptors.PessimisticLockingPolicy * * @since TopLink 10.1.3 */ public class CMPPolicy implements java.io.Serializable, Cloneable { protected Boolean forceUpdate; protected Boolean updateAllFields; /** Allow the bean to always be locked as it enters a new transaction. */ protected PessimisticLockingPolicy pessimisticLockingPolicy; /** Class originally mapped, before anything was generated. */ protected Class mappedClass; protected ClassDescriptor descriptor; /** The object deferral level. This controls when objects changes will be sent to the Database. */ protected int modificationDeferralLevel = ALL_MODIFICATIONS; /** defer no changes */ public static final int NONE = 0; /** defer updates */ public static final int UPDATE_MODIFICATIONS = 1; /** defer all modifications, inserts and deletes included (default) */ public static final int ALL_MODIFICATIONS = 2; /** This setting will allow customers to control when Toplink will issue the insert SQL for CMP beans. */ protected int nonDeferredCreateTime = UNDEFINED; /** undefined if it is non-deferred issue sql at create */ public static final int UNDEFINED = 0; /** issue SQL after ejbCreate but before ejbPostCreate */ public static final int AFTER_EJBCREATE = 1; /** issue SQL after ejbPostCreate */ public static final int AFTER_EJBPOSTCREATE = 2; public CMPPolicy() { this.forceUpdate = null; this.updateAllFields = null; } /** * ADVANCED: * This setting is only available for CMP beans that are not being deferred. * Using it will allow TopLink to determine if the INSERT SQL should be sent to * the database before or after the postCreate call. 
*/ public int getNonDeferredCreateTime() { return this.nonDeferredCreateTime; } /** * PUBLIC: * Return the policy for bean pessimistic locking * @see #org.eclipse.persistence.descriptors.PessimisticLockingPolicy */ public PessimisticLockingPolicy getPessimisticLockingPolicy() { return pessimisticLockingPolicy; } /** * ADVANCED: * This can be set to control when changes to objects are submitted to the database * This is only applicable to TopLink's CMP implementation and not available within * the core. */ public void setDeferModificationsUntilCommit(int deferralLevel) { this.modificationDeferralLevel = deferralLevel; } /** * PUBLIC: * Define the mapped class. This is the class which was originally mapped in the MW * * @param Class newMappedClass */ public void setMappedClass(Class newMappedClass) { mappedClass = newMappedClass; } /** * PUBLIC: * Answer the mapped class. This is the class which was originally mapped in the MW * */ public Class getMappedClass() { return mappedClass; } /** * ADVANCED: * This setting is only available for CMP beans that are not being deferred. * Using it will allow TopLink to determine if the INSERT SQL should be sent to * the database before or after the postCreate call. */ public void setNonDeferredCreateTime(int createTime) { this.nonDeferredCreateTime = createTime; } /** * PUBLIC: * Configure bean pessimistic locking * * @param PessimisticLockingPolicy policy * @see #org.eclipse.persistence.descriptors.PessimisticLockingPolicy */ public void setPessimisticLockingPolicy(PessimisticLockingPolicy policy) { pessimisticLockingPolicy = policy; } /** * PUBLIC: * Return true if bean pessimistic locking is configured */ public boolean hasPessimisticLockingPolicy() { return pessimisticLockingPolicy != null; } /** * ADVANCED: * This can be used to control when changes to objects are submitted to the database * This is only applicable to TopLink's CMP implementation and not available within * the core. 
*/ public int getDeferModificationsUntilCommit() { return this.modificationDeferralLevel; } /** * ADVANCED: * Return true if descriptor is set to always update all registered objects of this type */ public boolean getForceUpdate() { // default to false return (Boolean.TRUE.equals(this.forceUpdate)); } /** * ADVANCED: * Configure whether TopLink should always update all registered objects of * this type. NOTE: if set to true, then updateAllFields must also be set * to true * * @param boolean shouldForceUpdate */ public void setForceUpdate(boolean shouldForceUpdate) { this.forceUpdate = Boolean.valueOf(shouldForceUpdate); } /** * ADVANCED: * Return true if descriptor is set to update all fields for an object of this * type when an update occurs. */ public boolean getUpdateAllFields() { // default to false return Boolean.TRUE.equals(this.updateAllFields); } /** * ADVANCED: * Configure whether TopLink should update all fields for an object of this * type when an update occurs. * * @param boolean shouldUpdatAllFields */ public void setUpdateAllFields(boolean shouldUpdatAllFields) { this.updateAllFields = Boolean.valueOf(shouldUpdatAllFields); } /** * INTERNAL: * return internal tri-state value so we can decide whether to inherit or not at init time. */ public Boolean internalGetForceUpdate() { return this.forceUpdate; } /** * INTERNAL: * return internal tri-state value so we can decide whether to inherit or not at init time. */ public Boolean internalGetUpdateAllFields() { return this.updateAllFields; } /** * INTERNAL: * internal method to set the tri-state value. This is done in InheritancePolicy at init time. */ public void internalSetForceUpdate(Boolean newForceUpdateValue) { this.forceUpdate = newForceUpdateValue; } /** * INTERNAL: * internal method to set the tri-state value. This is done in InheritancePolicy at init time. 
*/ public void internalSetUpdateAllFields(Boolean newUpdateAllFieldsValue) { this.updateAllFields = newUpdateAllFieldsValue; } /** * INTERNAL: * Initialize the CMPPolicy settings. */ public void initialize(ClassDescriptor descriptor, AbstractSession session) throws DescriptorException { // updateAllFields is true so set custom query in DescriptorQueryManager // to force full SQL. Don't overwrite a user defined query if (this.getUpdateAllFields() && !descriptor.getQueryManager().hasUpdateQuery()) { descriptor.getQueryManager().setUpdateQuery(new UpdateObjectQuery()); } // make sure updateAllFields is set if forceUpdate is true if (this.getForceUpdate() && !this.getUpdateAllFields()) { throw DescriptorException.updateAllFieldsNotSet(descriptor); } } /** * INTERNAL: * @return Returns the owningDescriptor. */ public ClassDescriptor getDescriptor() { return descriptor; } /** * INTERNAL: * @param owningDescriptor The owningDescriptor to set. */ public void setDescriptor(ClassDescriptor owningDescriptor) { this.descriptor = owningDescriptor; } /** * INTERNAL: * Recursive method to set a field value in the given key instance. */ protected void setFieldValue(KeyElementAccessor accessor, Object keyInstance, DatabaseMapping mapping, AbstractSession session, int[] elementIndex, Object ... keyElements) { if (mapping.isAggregateMapping()) { Object nestedObject = mapping.getRealAttributeValueFromObject(keyInstance, session); if (nestedObject == null) { nestedObject = getClassInstance(mapping.getReferenceDescriptor().getJavaClass()); mapping.setRealAttributeValueInObject(keyInstance, nestedObject); } // keep drilling down the nested mappings ... 
setFieldValue(accessor, nestedObject, mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()), session, elementIndex, keyElements); } else { Object fieldValue = null; if (mapping.isAbstractColumnMapping()) { fieldValue = keyElements[elementIndex[0]]; Converter converter = ((AbstractColumnMapping) mapping).getConverter(); if (converter != null){ fieldValue = converter.convertDataValueToObjectValue(fieldValue, session); } ++elementIndex[0]; } else if (mapping.isObjectReferenceMapping()) { // what if mapping comes from derived ID. need to get the derived mapping. // get reference descriptor and extract pk from target cmp policy fieldValue = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstanceFromPrimaryKeyValues(session, elementIndex, keyElements); } accessor.setValue(keyInstance, fieldValue); } } /** * INTERNAL: * Return if this policy is for CMP3. */ public boolean isCMP3Policy() { return false; } /** * INTERNAL: * Clone the CMPPolicy */ public CMPPolicy clone() { try { return (CMPPolicy) super.clone(); } catch (CloneNotSupportedException exception) { throw new InternalError(exception.getMessage()); } } /** * INTERNAL: * Convert all the class-name-based settings in this object to actual class-based * settings. This method is used when converting a project that has been built * with class names to a project with classes. * @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader){ } /** * INTERNAL: * Create an instance of the composite primary key class for the key object. 
*/ public Object createPrimaryKeyInstanceFromId(Object key, AbstractSession session) { if (this.descriptor.getCachePolicy().getCacheKeyType() == CacheKeyType.CACHE_ID) { return createPrimaryKeyInstanceFromPrimaryKeyValues(session, new int[]{0}, ((CacheId)key).getPrimaryKey()); } else { return createPrimaryKeyInstanceFromPrimaryKeyValues(session, new int[]{0}, key); } } /** * INTERNAL: * Create an instance of the composite primary key class for the key object. * Yes the elementIndex looks strange but this is just a simple way to get the index to be pass-by-reference */ public Object createPrimaryKeyInstanceFromPrimaryKeyValues(AbstractSession session, int[] elementIndex, Object ... keyElements ) { Object keyInstance = null; KeyElementAccessor[] pkElementArray = getKeyClassFields(); if (isSingleKey(pkElementArray)) { for (KeyElementAccessor accessor: pkElementArray){ DatabaseMapping mapping = getDescriptor().getObjectBuilder().getMappingForAttributeName(accessor.getAttributeName()); if (mapping != null && !mapping.isMultitenantPrimaryKeyMapping()){ if (mapping.isAbstractColumnMapping()) { Converter converter = ((AbstractColumnMapping) mapping).getConverter(); if (converter != null){ return converter.convertDataValueToObjectValue(keyElements[elementIndex[0]], session); } keyInstance = keyElements[elementIndex[0]]; } else if (mapping.isObjectReferenceMapping()) { // what if mapping comes from derived ID. need to get the derived mapping. //get reference descriptor and extract pk from target cmp policy keyInstance = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstanceFromPrimaryKeyValues(session, elementIndex, keyElements); } ++elementIndex[0]; // remove processed key in case keys are complex and derived } if (keyInstance != null){ return keyInstance; } } } else { keyInstance = getPKClassInstance(); //get clone of Key so we can remove values. 
for (int index = 0; index < pkElementArray.length; index++) { KeyElementAccessor accessor = pkElementArray[index]; DatabaseMapping mapping = getDescriptor().getObjectBuilder().getMappingForAttributeName(accessor.getAttributeName()); if (mapping == null) { mapping = getDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()); } if (accessor.isNestedAccessor()) { // Need to recursively build all the nested objects. setFieldValue(accessor, keyInstance, mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()), session, elementIndex, keyElements); } else { // Not nested but may be a single layer aggregate so check. if (mapping.isAggregateMapping()) { mapping = mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()); } setFieldValue(accessor, keyInstance, mapping, session, elementIndex, keyElements); } } } return keyInstance; } /** * INTERNAL: * Create an instance of the Id class or value from the object. 
*/ public Object createPrimaryKeyInstance(Object object, AbstractSession session) { KeyElementAccessor[] pkElementArray = this.getKeyClassFields(); ObjectBuilder builder = getDescriptor().getObjectBuilder(); if (pkElementArray.length == 1 && pkElementArray[0] instanceof KeyIsElementAccessor){ DatabaseMapping mapping = builder.getMappingForAttributeName(pkElementArray[0].getAttributeName()); Object fieldValue = mapping.getRealAttributeValueFromObject(object, session); if (mapping.isObjectReferenceMapping()){ fieldValue = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstance(fieldValue, session); } return fieldValue; } Object keyInstance = getPKClassInstance(); Set<ObjectReferenceMapping> usedObjectReferenceMappings = new HashSet<ObjectReferenceMapping>(); for (int index = 0; index < pkElementArray.length; index++) { Object keyObj = object; KeyElementAccessor accessor = pkElementArray[index]; DatabaseField field = accessor.getDatabaseField(); DatabaseMapping mapping = builder.getMappingForField(field); // With session validation, the mapping shouldn't be null at this // point, don't bother checking. if (!mapping.isObjectReferenceMapping() || !usedObjectReferenceMappings.contains(mapping)){ while (mapping.isAggregateObjectMapping()) { keyObj = mapping.getRealAttributeValueFromObject(keyObj, session); mapping = mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(field); } Object fieldValue = mapping.getRealAttributeValueFromObject(keyObj, session); if (mapping.isObjectReferenceMapping()){ fieldValue = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstance(fieldValue, session); usedObjectReferenceMappings.add((ObjectReferenceMapping)mapping); } accessor.setValue(keyInstance, fieldValue); } } return keyInstance; } /** * INTERNAL: * Return a new instance of the class provided. 
*/ public Object getClassInstance(Class cls) { if (cls != null){ try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ return AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(cls)); } else { return org.eclipse.persistence.internal.security.PrivilegedAccessHelper.newInstanceFromClass(cls); } } catch (Exception e) { throw ValidationException.reflectiveExceptionWhileCreatingClassInstance(cls.getName(), e); } } return null; } /** * INTERNAL: */ public Object getPKClassInstance() { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: */ public Class getPKClass() { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: * Use the key to create a EclipseLink primary key. * If the key is simple (direct mapped) then just add it to a vector, * otherwise must go through the inefficient process of copying the key into the bean * and extracting the key from the bean. */ public Object createPrimaryKeyFromId(Object key, AbstractSession session) { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: * Use the key to create a bean and initialize its primary key fields. * Note: If is a compound PK then a primary key object is being used. * This method should only be used for 'templates' when executing * queries. The bean built will not be given an EntityContext and should * not be used as an actual entity bean. 
* * @param key Object the primary key to use for initializing the bean's * corresponding pk fields * @return Object */ public Object createBeanUsingKey(Object key, AbstractSession session) { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: * @return Returns the keyClassFields. */ protected KeyElementAccessor[] getKeyClassFields() { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * Check to see if there is a single key element. Iterate through the list of primary key elements * and count only keys that are not part of the Multitenant identifier. * * @param pkElementArray * @return */ protected boolean isSingleKey(KeyElementAccessor[] pkElementArray){ if ((pkElementArray.length == 1) && (pkElementArray[0] instanceof KeyIsElementAccessor)) { return true; } boolean foundFirstElement = false; for (KeyElementAccessor accessor: pkElementArray){ if (!(accessor instanceof KeyIsElementAccessor)){ return false; } if (!accessor.getMapping().isMultitenantPrimaryKeyMapping()){ if (foundFirstElement){ return false; } foundFirstElement = true; } } return true; } /** * INTERNAL: * This is the interface used to encapsulate the the type of key class element */ protected interface KeyElementAccessor { public String getAttributeName(); public DatabaseField getDatabaseField(); public DatabaseMapping getMapping(); public Object getValue(Object object, AbstractSession session); public void setValue(Object object, Object value); public boolean isNestedAccessor(); } /** * INTERNAL: * This class will be used when the keyClass is a primitive */ protected class KeyIsElementAccessor implements KeyElementAccessor, Serializable { protected String attributeName; protected 
DatabaseField databaseField; protected DatabaseMapping mapping; public KeyIsElementAccessor(String attributeName, DatabaseField databaseField, DatabaseMapping mapping) { this.attributeName = attributeName; this.databaseField = databaseField; this.mapping = mapping; } public String getAttributeName() { return attributeName; } public DatabaseField getDatabaseField() { return this.databaseField; } public DatabaseMapping getMapping(){ return this.mapping; } public Object getValue(Object object, AbstractSession session) { return object; } public boolean isNestedAccessor() { return false; } public void setValue(Object object, Object value) { // WIP - do nothing for now??? } } }
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/descriptors/CMPPolicy.java
/******************************************************************************* * Copyright (c) 1998, 2012 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 12/17/2010-2.2 Guy Pelletier * - 330755: Nested embeddables can't be used as embedded ids * 11/10/2011-2.4 Guy Pelletier * - 357474: Address primaryKey option from tenant discriminator column * 14/05/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications ******************************************************************************/ package org.eclipse.persistence.descriptors; import java.io.Serializable; import java.security.AccessController; import java.util.HashSet; import java.util.Set; import org.eclipse.persistence.annotations.CacheKeyType; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.ObjectReferenceMapping; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.mappings.foundation.AbstractColumnMapping; import org.eclipse.persistence.queries.UpdateObjectQuery; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.identitymaps.CacheId; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import 
org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.internal.sessions.AbstractSession; /** * <p> * <b>Description</b>: Place holder for CMP specific information. This class can be set on the ClassDescriptor. * * @see org.eclipse.persistence.descriptors.PessimisticLockingPolicy * * @since TopLink 10.1.3 */ public class CMPPolicy implements java.io.Serializable, Cloneable { protected Boolean forceUpdate; protected Boolean updateAllFields; /** Allow the bean to always be locked as it enters a new transaction. */ protected PessimisticLockingPolicy pessimisticLockingPolicy; /** Class originally mapped, before anything was generated. */ protected Class mappedClass; protected ClassDescriptor descriptor; /** The object deferral level. This controls when objects changes will be sent to the Database. */ protected int modificationDeferralLevel = ALL_MODIFICATIONS; /** defer no changes */ public static final int NONE = 0; /** defer updates */ public static final int UPDATE_MODIFICATIONS = 1; /** defer all modifications, inserts and deletes included (default) */ public static final int ALL_MODIFICATIONS = 2; /** This setting will allow customers to control when Toplink will issue the insert SQL for CMP beans. */ protected int nonDeferredCreateTime = UNDEFINED; /** undefined if it is non-deferred issue sql at create */ public static final int UNDEFINED = 0; /** issue SQL after ejbCreate but before ejbPostCreate */ public static final int AFTER_EJBCREATE = 1; /** issue SQL after ejbPostCreate */ public static final int AFTER_EJBPOSTCREATE = 2; public CMPPolicy() { this.forceUpdate = null; this.updateAllFields = null; } /** * ADVANCED: * This setting is only available for CMP beans that are not being deferred. * Using it will allow TopLink to determine if the INSERT SQL should be sent to * the database before or after the postCreate call. 
*/ public int getNonDeferredCreateTime() { return this.nonDeferredCreateTime; } /** * PUBLIC: * Return the policy for bean pessimistic locking * @see #org.eclipse.persistence.descriptors.PessimisticLockingPolicy */ public PessimisticLockingPolicy getPessimisticLockingPolicy() { return pessimisticLockingPolicy; } /** * ADVANCED: * This can be set to control when changes to objects are submitted to the database * This is only applicable to TopLink's CMP implementation and not available within * the core. */ public void setDeferModificationsUntilCommit(int deferralLevel) { this.modificationDeferralLevel = deferralLevel; } /** * PUBLIC: * Define the mapped class. This is the class which was originally mapped in the MW * * @param Class newMappedClass */ public void setMappedClass(Class newMappedClass) { mappedClass = newMappedClass; } /** * PUBLIC: * Answer the mapped class. This is the class which was originally mapped in the MW * */ public Class getMappedClass() { return mappedClass; } /** * ADVANCED: * This setting is only available for CMP beans that are not being deferred. * Using it will allow TopLink to determine if the INSERT SQL should be sent to * the database before or after the postCreate call. */ public void setNonDeferredCreateTime(int createTime) { this.nonDeferredCreateTime = createTime; } /** * PUBLIC: * Configure bean pessimistic locking * * @param PessimisticLockingPolicy policy * @see #org.eclipse.persistence.descriptors.PessimisticLockingPolicy */ public void setPessimisticLockingPolicy(PessimisticLockingPolicy policy) { pessimisticLockingPolicy = policy; } /** * PUBLIC: * Return true if bean pessimistic locking is configured */ public boolean hasPessimisticLockingPolicy() { return pessimisticLockingPolicy != null; } /** * ADVANCED: * This can be used to control when changes to objects are submitted to the database * This is only applicable to TopLink's CMP implementation and not available within * the core. 
*/ public int getDeferModificationsUntilCommit() { return this.modificationDeferralLevel; } /** * ADVANCED: * Return true if descriptor is set to always update all registered objects of this type */ public boolean getForceUpdate() { // default to false return (Boolean.TRUE.equals(this.forceUpdate)); } /** * ADVANCED: * Configure whether TopLink should always update all registered objects of * this type. NOTE: if set to true, then updateAllFields must also be set * to true * * @param boolean shouldForceUpdate */ public void setForceUpdate(boolean shouldForceUpdate) { this.forceUpdate = Boolean.valueOf(shouldForceUpdate); } /** * ADVANCED: * Return true if descriptor is set to update all fields for an object of this * type when an update occurs. */ public boolean getUpdateAllFields() { // default to false return Boolean.TRUE.equals(this.updateAllFields); } /** * ADVANCED: * Configure whether TopLink should update all fields for an object of this * type when an update occurs. * * @param boolean shouldUpdatAllFields */ public void setUpdateAllFields(boolean shouldUpdatAllFields) { this.updateAllFields = Boolean.valueOf(shouldUpdatAllFields); } /** * INTERNAL: * return internal tri-state value so we can decide whether to inherit or not at init time. */ public Boolean internalGetForceUpdate() { return this.forceUpdate; } /** * INTERNAL: * return internal tri-state value so we can decide whether to inherit or not at init time. */ public Boolean internalGetUpdateAllFields() { return this.updateAllFields; } /** * INTERNAL: * internal method to set the tri-state value. This is done in InheritancePolicy at init time. */ public void internalSetForceUpdate(Boolean newForceUpdateValue) { this.forceUpdate = newForceUpdateValue; } /** * INTERNAL: * internal method to set the tri-state value. This is done in InheritancePolicy at init time. 
*/ public void internalSetUpdateAllFields(Boolean newUpdateAllFieldsValue) { this.updateAllFields = newUpdateAllFieldsValue; } /** * INTERNAL: * Initialize the CMPPolicy settings. */ public void initialize(ClassDescriptor descriptor, AbstractSession session) throws DescriptorException { // updateAllFields is true so set custom query in DescriptorQueryManager // to force full SQL. Don't overwrite a user defined query if (this.getUpdateAllFields() && !descriptor.getQueryManager().hasUpdateQuery()) { descriptor.getQueryManager().setUpdateQuery(new UpdateObjectQuery()); } // make sure updateAllFields is set if forceUpdate is true if (this.getForceUpdate() && !this.getUpdateAllFields()) { throw DescriptorException.updateAllFieldsNotSet(descriptor); } } /** * INTERNAL: * @return Returns the owningDescriptor. */ public ClassDescriptor getDescriptor() { return descriptor; } /** * INTERNAL: * @param owningDescriptor The owningDescriptor to set. */ public void setDescriptor(ClassDescriptor owningDescriptor) { this.descriptor = owningDescriptor; } /** * INTERNAL: * Recursive method to set a field value in the given key instance. */ protected void setFieldValue(KeyElementAccessor accessor, Object keyInstance, DatabaseMapping mapping, AbstractSession session, int[] elementIndex, Object ... keyElements) { if (mapping.isAggregateMapping()) { Object nestedObject = mapping.getRealAttributeValueFromObject(keyInstance, session); if (nestedObject == null) { nestedObject = getClassInstance(mapping.getReferenceDescriptor().getJavaClass()); mapping.setRealAttributeValueInObject(keyInstance, nestedObject); } // keep drilling down the nested mappings ... 
setFieldValue(accessor, nestedObject, mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()), session, elementIndex, keyElements); } else { Object fieldValue = null; if (mapping.isAbstractColumnMapping()) { fieldValue = keyElements[elementIndex[0]]; Converter converter = ((AbstractColumnMapping) mapping).getConverter(); if (converter != null){ fieldValue = converter.convertDataValueToObjectValue(fieldValue, session); } ++elementIndex[0]; } else if (mapping.isObjectReferenceMapping()) { // what if mapping comes from derived ID. need to get the derived mapping. // get reference descriptor and extract pk from target cmp policy fieldValue = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstanceFromPrimaryKeyValues(session, elementIndex, keyElements); } accessor.setValue(keyInstance, fieldValue); } } /** * INTERNAL: * Return if this policy is for CMP3. */ public boolean isCMP3Policy() { return false; } /** * INTERNAL: * Clone the CMPPolicy */ public CMPPolicy clone() { try { return (CMPPolicy) super.clone(); } catch (CloneNotSupportedException exception) { throw new InternalError(exception.getMessage()); } } /** * INTERNAL: * Convert all the class-name-based settings in this object to actual class-based * settings. This method is used when converting a project that has been built * with class names to a project with classes. * @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader){ } /** * INTERNAL: * Create an instance of the composite primary key class for the key object. 
*/ public Object createPrimaryKeyInstanceFromId(Object key, AbstractSession session) { if (this.descriptor.getCachePolicy().getCacheKeyType() == CacheKeyType.CACHE_ID) { return createPrimaryKeyInstanceFromPrimaryKeyValues(session, new int[]{0}, ((CacheId)key).getPrimaryKey()); } else { return createPrimaryKeyInstanceFromPrimaryKeyValues(session, new int[]{0}, key); } } /** * INTERNAL: * Create an instance of the composite primary key class for the key object. * Yes the elementIndex looks strange but this is just a simple way to get the index to be pass-by-reference */ public Object createPrimaryKeyInstanceFromPrimaryKeyValues(AbstractSession session, int[] elementIndex, Object ... keyElements ) { Object keyInstance = getPKClassInstance(); KeyElementAccessor[] pkElementArray = getKeyClassFields(); if (keyInstance == null){ // single primary key - there is no key class for (KeyElementAccessor accessor : pkElementArray){ if (accessor instanceof KeyIsElementAccessor){ DatabaseMapping mapping = getDescriptor().getObjectBuilder().getMappingForAttributeName(accessor.getAttributeName()); if (mapping != null && !mapping.isMultitenantPrimaryKeyMapping()){ if (mapping.isAbstractColumnMapping()) { Converter converter = ((AbstractColumnMapping) mapping).getConverter(); if (converter != null){ return converter.convertDataValueToObjectValue(keyElements[elementIndex[0]], session); } keyInstance = keyElements[elementIndex[0]]; } else if (mapping.isObjectReferenceMapping()) { // what if mapping comes from derived ID. need to get the derived mapping. //get reference descriptor and extract pk from target cmp policy keyInstance = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstanceFromPrimaryKeyValues(session, elementIndex, keyElements); } ++elementIndex[0]; // remove processed key in case keys are complex and derived } } } } else { //get clone of Key so we can remove values. 
for (int index = 0; index < pkElementArray.length; index++) { KeyElementAccessor accessor = pkElementArray[index]; DatabaseMapping mapping = getDescriptor().getObjectBuilder().getMappingForAttributeName(accessor.getAttributeName()); if (mapping == null) { mapping = getDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()); } if (accessor.isNestedAccessor()) { // Need to recursively build all the nested objects. setFieldValue(accessor, keyInstance, mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()), session, elementIndex, keyElements); } else { // Not nested but may be a single layer aggregate so check. if (mapping.isAggregateMapping()) { mapping = mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(accessor.getDatabaseField()); } setFieldValue(accessor, keyInstance, mapping, session, elementIndex, keyElements); } } } return keyInstance; } /** * INTERNAL: * Create an instance of the Id class or value from the object. 
*/ public Object createPrimaryKeyInstance(Object object, AbstractSession session) { KeyElementAccessor[] pkElementArray = this.getKeyClassFields(); ObjectBuilder builder = getDescriptor().getObjectBuilder(); if (pkElementArray.length == 1 && pkElementArray[0] instanceof KeyIsElementAccessor){ DatabaseMapping mapping = builder.getMappingForAttributeName(pkElementArray[0].getAttributeName()); Object fieldValue = mapping.getRealAttributeValueFromObject(object, session); if (mapping.isObjectReferenceMapping()){ fieldValue = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstance(fieldValue, session); } return fieldValue; } Object keyInstance = getPKClassInstance(); Set<ObjectReferenceMapping> usedObjectReferenceMappings = new HashSet<ObjectReferenceMapping>(); for (int index = 0; index < pkElementArray.length; index++) { Object keyObj = object; KeyElementAccessor accessor = pkElementArray[index]; DatabaseField field = accessor.getDatabaseField(); DatabaseMapping mapping = builder.getMappingForField(field); // With session validation, the mapping shouldn't be null at this // point, don't bother checking. if (!mapping.isObjectReferenceMapping() || !usedObjectReferenceMappings.contains(mapping)){ while (mapping.isAggregateObjectMapping()) { keyObj = mapping.getRealAttributeValueFromObject(keyObj, session); mapping = mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(field); } Object fieldValue = mapping.getRealAttributeValueFromObject(keyObj, session); if (mapping.isObjectReferenceMapping()){ fieldValue = mapping.getReferenceDescriptor().getCMPPolicy().createPrimaryKeyInstance(fieldValue, session); usedObjectReferenceMappings.add((ObjectReferenceMapping)mapping); } accessor.setValue(keyInstance, fieldValue); } } return keyInstance; } /** * INTERNAL: * Return a new instance of the class provided. 
*/ public Object getClassInstance(Class cls) { if (cls != null){ try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ return AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(cls)); } else { return org.eclipse.persistence.internal.security.PrivilegedAccessHelper.newInstanceFromClass(cls); } } catch (Exception e) { throw ValidationException.reflectiveExceptionWhileCreatingClassInstance(cls.getName(), e); } } return null; } /** * INTERNAL: */ public Object getPKClassInstance() { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: */ public Class getPKClass() { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: * Use the key to create a EclipseLink primary key. * If the key is simple (direct mapped) then just add it to a vector, * otherwise must go through the inefficient process of copying the key into the bean * and extracting the key from the bean. */ public Object createPrimaryKeyFromId(Object key, AbstractSession session) { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: * Use the key to create a bean and initialize its primary key fields. * Note: If is a compound PK then a primary key object is being used. * This method should only be used for 'templates' when executing * queries. The bean built will not be given an EntityContext and should * not be used as an actual entity bean. 
* * @param key Object the primary key to use for initializing the bean's * corresponding pk fields * @return Object */ public Object createBeanUsingKey(Object key, AbstractSession session) { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: * @return Returns the keyClassFields. */ protected KeyElementAccessor[] getKeyClassFields() { // TODO fix this exception so that it is more descriptive // This method only works in CMP3Policy but was added here for separation // of components throw new RuntimeException("Should not get here."); } /** * INTERNAL: * This is the interface used to encapsulate the the type of key class element */ protected interface KeyElementAccessor { public String getAttributeName(); public DatabaseField getDatabaseField(); public DatabaseMapping getMapping(); public Object getValue(Object object, AbstractSession session); public void setValue(Object object, Object value); public boolean isNestedAccessor(); } /** * INTERNAL: * This class will be used when the keyClass is a primitive */ protected class KeyIsElementAccessor implements KeyElementAccessor, Serializable { protected String attributeName; protected DatabaseField databaseField; protected DatabaseMapping mapping; public KeyIsElementAccessor(String attributeName, DatabaseField databaseField, DatabaseMapping mapping) { this.attributeName = attributeName; this.databaseField = databaseField; this.mapping = mapping; } public String getAttributeName() { return attributeName; } public DatabaseField getDatabaseField() { return this.databaseField; } public DatabaseMapping getMapping(){ return this.mapping; } public Object getValue(Object object, AbstractSession session) { return object; } public boolean isNestedAccessor() { return false; } public void setValue(Object object, Object value) { // WIP - do nothing for now??? } } }
Update CMPPolicy to calculate keys that include Multitenant ids property Former-commit-id: bd5f6c84e33a6c9d5351ade75003f657b421410c
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/descriptors/CMPPolicy.java
Update CMPPolicy to calculate keys that include Multitenant ids property
Java
agpl-3.0
e6ca356416d94041029c069422c3c1840afee79b
0
alejandro-du/infodoc-core
package infodoc.core.dto; import java.io.Serializable; import java.util.Set; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.Table; import javax.persistence.UniqueConstraint; import enterpriseapp.hibernate.annotation.CrudTable; import enterpriseapp.hibernate.dto.Dto; @Entity @Table(name="user_group", uniqueConstraints={@UniqueConstraint(columnNames={"name", "parent_user_group_id"})}) @CrudTable(filteringPropertyName="nombre") public class UserGroup extends Dto implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy=GenerationType.AUTO) @Column(name="id") private Long id; @Column(name="name", nullable=false) private String name; @Column(name="access_admin_module", nullable=false) private boolean accessAdminModule; @Column(name="access_admin_users", nullable=false) private boolean accessAdminUsers; @Column(name="access_admin_user_groups", nullable=false) private boolean accessAdminUserGroups; @Column(name="access_admin_forms", nullable=false) private boolean accessAdminCases; @Column(name="access_admin_property_values", nullable=false) private boolean accessAdminPropertyValues; @Column(name="access_admin_activity_instances", nullable=false) private boolean accessAdminActivityInstances; @Column(name="can_create_delete_users", nullable=false) private boolean canCreateAndDeleteUsers; @Column(name="access_hql_query", nullable=false) private boolean accessHqlQuery; @Column(name="access_admin_hql_reports", nullable=false) private boolean accessAdminHqlReports; @Column(name="access_log_files", nullable=false) private boolean accessLogFiles; @Column(name="access_audit_log", nullable=false) private boolean accessAuditLog; 
@Column(name="access_config_module", nullable=false) private boolean accesoConfigModule; @Column(name="access_config_properties", nullable=false) private boolean accessConfigProperties; @Column(name="access_config_validations", nullable=false) private boolean accessConfigValidations; @Column(name="access_config_numeration", nullable=false) private boolean accessConfigNumeration; @Column(name="access_config_forms", nullable=false) private boolean accessConfigForms; @Column(name="access_config_activities", nullable=false) private boolean accessConfigActivities; @Column(name="access_config_notifications", nullable=false) private boolean accessConfigNotifications; @Column(name="access_config_classifications", nullable=false) private boolean accessConfigClassifications; @Column(name="access_config_classification_values", nullable=false) private boolean accessConfigClassificationValues; @Column(name="access_admin_notification_instances", nullable=false) private boolean accessAdminNotificationInstances; @Column(name="access_basic_module", nullable=false) private boolean accessBasicModule; @Column(name="access_last_activity_instances", nullable=false) private boolean accessLastActivityInstances; @Column(name="access_admin_java_reports", nullable=false) private boolean accessAdminJavaReports; @ManyToOne @JoinColumn(name="parent_user_group_id") private UserGroup parentUserGroup; @ManyToMany @JoinTable( name="user_group_searches_form" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="form_id") } ) private Set<Form> accessSearchForm; @ManyToMany @JoinTable( name="user_group_has_java_report" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="java_report_id") } ) private Set<JavaReport> javaReports; @ManyToMany(mappedBy="userGroup") private Set<User> users; @ManyToMany @JoinTable( name="user_group_has_property" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ 
@JoinColumn(name="property_id") } ) private Set<Property> properties; @ManyToMany @JoinTable( name="user_group_has_activity" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="activity_id") } ) private Set<Activity> activities; @ManyToMany @JoinTable( name="user_group_can_assign_user_group" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="can_assign_user_group_id") } ) private Set<UserGroup> canAssignToUserGroups; @ManyToMany @JoinTable( name="user_group_can_assign_user" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="user_id") } ) private Set<User> canAssignToUsers; @Override public String toString() { return parentUserGroup == null ? name : parentUserGroup.getName() + "-" + name; } @Override public Long getId() { return id; } @Override public void setId(Object id) { this.id = (Long) id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public boolean getAccessAdminModule() { return accessAdminModule; } public void setAccessAdminModule(boolean accessAdminModule) { this.accessAdminModule = accessAdminModule; } public boolean getAccessAdminUsers() { return accessAdminUsers; } public void setAccessAdminUsers(boolean accessAdminUsers) { this.accessAdminUsers = accessAdminUsers; } public boolean getCanCreateAndDeleteUsers() { return canCreateAndDeleteUsers; } public void setCanCreateAndDeleteUsers(boolean canCreateAndDeleteUsers) { this.canCreateAndDeleteUsers = canCreateAndDeleteUsers; } public boolean getAccessAdminUserGroups() { return accessAdminUserGroups; } public void setAccessAdminUserGroups(boolean accessAdminUserGroups) { this.accessAdminUserGroups = accessAdminUserGroups; } public boolean getAccessHqlQuery() { return accessHqlQuery; } public void setAccessHqlQuery(boolean accessHqlQuery) { this.accessHqlQuery = accessHqlQuery; } public boolean getAccessAdminHqlReports() { return 
accessAdminHqlReports; } public void setAccessAdminHqlReports(boolean accessAdminHqlReports) { this.accessAdminHqlReports = accessAdminHqlReports; } public boolean getAccessAuditLog() { return accessAuditLog; } public void setAccessAuditLog(boolean accessAuditLog) { this.accessAuditLog = accessAuditLog; } public boolean getAccessLogFiles() { return accessLogFiles; } public void setAccessLogFiles(boolean accessLogFiles) { this.accessLogFiles = accessLogFiles; } public boolean getAccesoConfigModule() { return accesoConfigModule; } public void setAccesoConfigModule(boolean accesoConfigModule) { this.accesoConfigModule = accesoConfigModule; } public boolean getAccessConfigProperties() { return accessConfigProperties; } public void setAccessConfigProperties(boolean accessConfigProperties) { this.accessConfigProperties = accessConfigProperties; } public boolean getAccessConfigValidations() { return accessConfigValidations; } public void setAccessConfigValidations(boolean accessConfigValidations) { this.accessConfigValidations = accessConfigValidations; } public boolean getAccessConfigNumeration() { return accessConfigNumeration; } public void setAccessConfigNumeration(boolean accessConfigNumeration) { this.accessConfigNumeration = accessConfigNumeration; } public boolean getAccessConfigForms() { return accessConfigForms; } public void setAccessConfigForms(boolean accessConfigForms) { this.accessConfigForms = accessConfigForms; } public boolean getAccessConfigActivities() { return accessConfigActivities; } public void setAccessConfigActivities(boolean accessConfigActivities) { this.accessConfigActivities = accessConfigActivities; } public boolean getAccessConfigNotifications() { return accessConfigNotifications; } public void setAccessConfigNotifications(boolean accessConfigNotifications) { this.accessConfigNotifications = accessConfigNotifications; } public boolean getAccessConfigClassifications() { return accessConfigClassifications; } public void 
setAccessConfigClassifications(boolean accessConfigClassifications) { this.accessConfigClassifications = accessConfigClassifications; } public boolean getAccessConfigClassificationValues() { return accessConfigClassificationValues; } public void setAccessConfigClassificationValues(boolean accessConfigClassificationValues) { this.accessConfigClassificationValues = accessConfigClassificationValues; } public boolean getAccessAdminCases() { return accessAdminCases; } public void setAccessAdminCases(boolean accessAdminCases) { this.accessAdminCases = accessAdminCases; } public boolean getAccessAdminPropertyValues() { return accessAdminPropertyValues; } public void setAccessAdminPropertyValues(boolean accessAdminPropertyValues) { this.accessAdminPropertyValues = accessAdminPropertyValues; } public boolean getAccessAdminActivityInstances() { return accessAdminActivityInstances; } public void setAccessAdminActivityInstances(boolean accessAdminActivityInstances) { this.accessAdminActivityInstances = accessAdminActivityInstances; } public boolean getAccessAdminNotificationInstances() { return accessAdminNotificationInstances; } public void setAccessAdminNotificationInstances(boolean accessAdminNotificationInstances) { this.accessAdminNotificationInstances = accessAdminNotificationInstances; } public boolean getAccessBasicModule() { return accessBasicModule; } public void setAccessBasicModule(boolean accessBasicModule) { this.accessBasicModule = accessBasicModule; } public boolean getAccessLastActivityInstances() { return accessLastActivityInstances; } public void setAccessLastActivityInstances(boolean accessLastActivityInstances) { this.accessLastActivityInstances = accessLastActivityInstances; } public boolean getAccessAdminJavaReports() { return accessAdminJavaReports; } public void setAccessAdminJavaReports(boolean accessAdminJavaReports) { this.accessAdminJavaReports = accessAdminJavaReports; } public UserGroup getParentUserGroup() { return parentUserGroup; } public void 
setParentUserGroup(UserGroup parentUserGroup) { this.parentUserGroup = parentUserGroup; } public Set<Form> getAccessSearchForm() { return accessSearchForm; } public void setAccessSearchForm(Set<Form> accessSearchForm) { this.accessSearchForm = accessSearchForm; } public Set<User> getUsers() { return users; } public void setUsers(Set<User> users) { this.users = users; } public Set<Property> getProperties() { return properties; } public void setProperties(Set<Property> properties) { this.properties = properties; } public Set<Activity> getActivities() { return activities; } public void setActivities(Set<Activity> activities) { this.activities = activities; } public Set<UserGroup> getCanAssignToUserGroups() { return canAssignToUserGroups; } public void setCanAssignToUserGroups(Set<UserGroup> canAssignToUserGroups) { this.canAssignToUserGroups = canAssignToUserGroups; } public Set<User> getCanAssignToUsers() { return canAssignToUsers; } public void setCanAssignToUsers(Set<User> canAssignToUsers) { this.canAssignToUsers = canAssignToUsers; } public Set<JavaReport> getJavaReports() { return javaReports; } public void setJavaReports(Set<JavaReport> javaReports) { this.javaReports = javaReports; } }
src/infodoc/core/dto/UserGroup.java
package infodoc.core.dto; import java.io.Serializable; import java.util.Set; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.Table; import javax.persistence.UniqueConstraint; import enterpriseapp.hibernate.annotation.CrudTable; import enterpriseapp.hibernate.dto.Dto; @Entity @Table(name="user_group", uniqueConstraints={@UniqueConstraint(columnNames={"name", "parent_user_group_id"})}) @CrudTable(filteringPropertyName="nombre") public class UserGroup extends Dto implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy=GenerationType.AUTO) @Column(name="id") private Long id; @Column(name="name", nullable=false) private String name; @Column(name="access_admin_module", nullable=false) private boolean accessAdminModule; @Column(name="access_admin_users", nullable=false) private boolean accessAdminUsers; @Column(name="access_admin_user_groups", nullable=false) private boolean accessAdminUserGroups; @Column(name="access_admin_forms", nullable=false) private boolean accessAdminCases; @Column(name="access_admin_property_values", nullable=false) private boolean accessAdminPropertyValues; @Column(name="access_admin_activity_instances", nullable=false) private boolean accessAdminActivityInstances; @Column(name="can_create_modify_users", nullable=false) private boolean canCreateAndModifyUsers; @Column(name="access_hql_query", nullable=false) private boolean accessHqlQuery; @Column(name="access_admin_hql_reports", nullable=false) private boolean accessAdminHqlReports; @Column(name="access_log_files", nullable=false) private boolean accessLogFiles; @Column(name="access_audit_log", nullable=false) private boolean accessAuditLog; 
@Column(name="access_config_module", nullable=false) private boolean accesoConfigModule; @Column(name="access_config_properties", nullable=false) private boolean accessConfigProperties; @Column(name="access_config_validations", nullable=false) private boolean accessConfigValidations; @Column(name="access_config_numeration", nullable=false) private boolean accessConfigNumeration; @Column(name="access_config_forms", nullable=false) private boolean accessConfigForms; @Column(name="access_config_activities", nullable=false) private boolean accessConfigActivities; @Column(name="access_config_notifications", nullable=false) private boolean accessConfigNotifications; @Column(name="access_config_classifications", nullable=false) private boolean accessConfigClassifications; @Column(name="access_config_classification_values", nullable=false) private boolean accessConfigClassificationValues; @Column(name="access_admin_notification_instances", nullable=false) private boolean accessAdminNotificationInstances; @Column(name="access_basic_module", nullable=false) private boolean accessBasicModule; @Column(name="access_last_activity_instances", nullable=false) private boolean accessLastActivityInstances; @Column(name="access_admin_java_reports", nullable=false) private boolean accessAdminJavaReports; @ManyToOne @JoinColumn(name="parent_user_group_id") private UserGroup parentUserGroup; @ManyToMany @JoinTable( name="user_group_searches_form" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="form_id") } ) private Set<Form> accessSearchForm; @ManyToMany @JoinTable( name="user_group_has_java_report" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="java_report_id") } ) private Set<JavaReport> javaReports; @ManyToMany(mappedBy="userGroup") private Set<User> users; @ManyToMany @JoinTable( name="user_group_has_property" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ 
@JoinColumn(name="property_id") } ) private Set<Property> properties; @ManyToMany @JoinTable( name="user_group_has_activity" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="activity_id") } ) private Set<Activity> activities; @ManyToMany @JoinTable( name="user_group_can_assign_user_group" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="can_assign_user_group_id") } ) private Set<UserGroup> canAssignToUserGroups; @ManyToMany @JoinTable( name="user_group_can_assign_user" , joinColumns={ @JoinColumn(name="user_group_id") } , inverseJoinColumns={ @JoinColumn(name="user_id") } ) private Set<User> canAssignToUsers; @Override public String toString() { return parentUserGroup == null ? name : parentUserGroup.getName() + "-" + name; } @Override public Long getId() { return id; } @Override public void setId(Object id) { this.id = (Long) id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public boolean getAccessAdminModule() { return accessAdminModule; } public void setAccessAdminModule(boolean accessAdminModule) { this.accessAdminModule = accessAdminModule; } public boolean getAccessAdminUsers() { return accessAdminUsers; } public void setAccessAdminUsers(boolean accessAdminUsers) { this.accessAdminUsers = accessAdminUsers; } public boolean getCanCreateAndModifyUsers() { return canCreateAndModifyUsers; } public void setCanCreateAndModifyUsers(boolean canCreateAndModifyUsers) { this.canCreateAndModifyUsers = canCreateAndModifyUsers; } public boolean getAccessAdminUserGroups() { return accessAdminUserGroups; } public void setAccessAdminUserGroups(boolean accessAdminUserGroups) { this.accessAdminUserGroups = accessAdminUserGroups; } public boolean getAccessHqlQuery() { return accessHqlQuery; } public void setAccessHqlQuery(boolean accessHqlQuery) { this.accessHqlQuery = accessHqlQuery; } public boolean getAccessAdminHqlReports() { return 
accessAdminHqlReports; } public void setAccessAdminHqlReports(boolean accessAdminHqlReports) { this.accessAdminHqlReports = accessAdminHqlReports; } public boolean getAccessAuditLog() { return accessAuditLog; } public void setAccessAuditLog(boolean accessAuditLog) { this.accessAuditLog = accessAuditLog; } public boolean getAccessLogFiles() { return accessLogFiles; } public void setAccessLogFiles(boolean accessLogFiles) { this.accessLogFiles = accessLogFiles; } public boolean getAccesoConfigModule() { return accesoConfigModule; } public void setAccesoConfigModule(boolean accesoConfigModule) { this.accesoConfigModule = accesoConfigModule; } public boolean getAccessConfigProperties() { return accessConfigProperties; } public void setAccessConfigProperties(boolean accessConfigProperties) { this.accessConfigProperties = accessConfigProperties; } public boolean getAccessConfigValidations() { return accessConfigValidations; } public void setAccessConfigValidations(boolean accessConfigValidations) { this.accessConfigValidations = accessConfigValidations; } public boolean getAccessConfigNumeration() { return accessConfigNumeration; } public void setAccessConfigNumeration(boolean accessConfigNumeration) { this.accessConfigNumeration = accessConfigNumeration; } public boolean getAccessConfigForms() { return accessConfigForms; } public void setAccessConfigForms(boolean accessConfigForms) { this.accessConfigForms = accessConfigForms; } public boolean getAccessConfigActivities() { return accessConfigActivities; } public void setAccessConfigActivities(boolean accessConfigActivities) { this.accessConfigActivities = accessConfigActivities; } public boolean getAccessConfigNotifications() { return accessConfigNotifications; } public void setAccessConfigNotifications(boolean accessConfigNotifications) { this.accessConfigNotifications = accessConfigNotifications; } public boolean getAccessConfigClassifications() { return accessConfigClassifications; } public void 
setAccessConfigClassifications(boolean accessConfigClassifications) { this.accessConfigClassifications = accessConfigClassifications; } public boolean getAccessConfigClassificationValues() { return accessConfigClassificationValues; } public void setAccessConfigClassificationValues(boolean accessConfigClassificationValues) { this.accessConfigClassificationValues = accessConfigClassificationValues; } public boolean getAccessAdminCases() { return accessAdminCases; } public void setAccessAdminCases(boolean accessAdminCases) { this.accessAdminCases = accessAdminCases; } public boolean getAccessAdminPropertyValues() { return accessAdminPropertyValues; } public void setAccessAdminPropertyValues(boolean accessAdminPropertyValues) { this.accessAdminPropertyValues = accessAdminPropertyValues; } public boolean getAccessAdminActivityInstances() { return accessAdminActivityInstances; } public void setAccessAdminActivityInstances(boolean accessAdminActivityInstances) { this.accessAdminActivityInstances = accessAdminActivityInstances; } public boolean getAccessAdminNotificationInstances() { return accessAdminNotificationInstances; } public void setAccessAdminNotificationInstances(boolean accessAdminNotificationInstances) { this.accessAdminNotificationInstances = accessAdminNotificationInstances; } public boolean getAccessBasicModule() { return accessBasicModule; } public void setAccessBasicModule(boolean accessBasicModule) { this.accessBasicModule = accessBasicModule; } public boolean getAccessLastActivityInstances() { return accessLastActivityInstances; } public void setAccessLastActivityInstances(boolean accessLastActivityInstances) { this.accessLastActivityInstances = accessLastActivityInstances; } public boolean getAccessAdminJavaReports() { return accessAdminJavaReports; } public void setAccessAdminJavaReports(boolean accessAdminJavaReports) { this.accessAdminJavaReports = accessAdminJavaReports; } public UserGroup getParentUserGroup() { return parentUserGroup; } public void 
setParentUserGroup(UserGroup parentUserGroup) { this.parentUserGroup = parentUserGroup; } public Set<Form> getAccessSearchForm() { return accessSearchForm; } public void setAccessSearchForm(Set<Form> accessSearchForm) { this.accessSearchForm = accessSearchForm; } public Set<User> getUsers() { return users; } public void setUsers(Set<User> users) { this.users = users; } public Set<Property> getProperties() { return properties; } public void setProperties(Set<Property> properties) { this.properties = properties; } public Set<Activity> getActivities() { return activities; } public void setActivities(Set<Activity> activities) { this.activities = activities; } public Set<UserGroup> getCanAssignToUserGroups() { return canAssignToUserGroups; } public void setCanAssignToUserGroups(Set<UserGroup> canAssignToUserGroups) { this.canAssignToUserGroups = canAssignToUserGroups; } public Set<User> getCanAssignToUsers() { return canAssignToUsers; } public void setCanAssignToUsers(Set<User> canAssignToUsers) { this.canAssignToUsers = canAssignToUsers; } public Set<JavaReport> getJavaReports() { return javaReports; } public void setJavaReports(Set<JavaReport> javaReports) { this.javaReports = javaReports; } }
Refactor in class field.
src/infodoc/core/dto/UserGroup.java
Refactor in class field.
Java
agpl-3.0
653984d2d822548548992808263c638331eccca8
0
fanout/tigase-server,f24-ag/tigase,cgvarela/tigase-server,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,wangningbo/tigase-server,wangningbo/tigase-server,fanout/tigase-server,amikey/tigase-server,amikey/tigase-server,wangningbo/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,caiyingyuan/tigase71,Smartupz/tigase-server,nate-sentjens/tigase-xmpp-java,pivotal-nathan-sentjens/tigase-xmpp-java,wangningbo/tigase-server,cgvarela/tigase-server,sourcebits-praveenkh/Tagase,fanout/tigase-server,amikey/tigase-server,f24-ag/tigase,amikey/tigase-server,nate-sentjens/tigase-xmpp-java,f24-ag/tigase,Smartupz/tigase-server,cgvarela/tigase-server,caiyingyuan/tigase71,pivotal-nathan-sentjens/tigase-xmpp-java,sourcebits-praveenkh/Tagase,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,fanout/tigase-server,Smartupz/tigase-server,caiyingyuan/tigase71,nate-sentjens/tigase-xmpp-java,amikey/tigase-server,Smartupz/tigase-server,sourcebits-praveenkh/Tagase,wangningbo/tigase-server,wangningbo/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,sourcebits-praveenkh/Tagase,cgvarela/tigase-server,caiyingyuan/tigase71,sourcebits-praveenkh/Tagase,f24-ag/tigase,wangningbo/tigase-server,caiyingyuan/tigase71,Smartupz/tigase-server,caiyingyuan/tigase71,pivotal-nathan-sentjens/tigase-xmpp-java,cgvarela/tigase-server,amikey/tigase-server,cgvarela/tigase-server,fanout/tigase-server,nate-sentjens/tigase-xmpp-java,fanout/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,sourcebits-praveenkh/Tagase,Smartupz/tigase-server
/* * Tigase Jabber/XMPP Server * Copyright (C) 2004-2007 "Artur Hefczyc" <[email protected]> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. Look for COPYING file in the top folder. * If not, see http://www.gnu.org/licenses/. * * $Rev$ * Last modified by $Author$ * $Date$ */ package tigase.db.jdbc; //~--- non-JDK imports -------------------------------------------------------- import tigase.db.AuthRepository; import tigase.db.AuthRepositoryImpl; import tigase.db.AuthorizationException; import tigase.db.DBInitException; import tigase.db.DataRepository; import tigase.db.RepositoryFactory; import tigase.db.TigaseDBException; import tigase.db.UserExistsException; import tigase.db.UserNotFoundException; import tigase.db.UserRepository; import tigase.util.SimpleCache; import tigase.xmpp.BareJID; //~--- JDK imports ------------------------------------------------------------ import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.logging.Level; import java.util.logging.Logger; //~--- classes ---------------------------------------------------------------- /** * Not synchronized implementation! Musn't be used by more than one thread at * the same time. 
* <p> * Thanks to Daniele for better unique IDs handling. Created: Thu Oct 26 * 11:48:53 2006 * </p> * * @author <a href="mailto:[email protected]">Artur Hefczyc</a> * @author <a href="mailto:[email protected]">Daniele</a> * @version $Rev$ */ public class JDBCRepository implements AuthRepository, UserRepository { private static final Logger log = Logger.getLogger(JDBCRepository.class.getName()); /** Field description */ public static final String DEF_USERS_TBL = "tig_users"; /** Field description */ public static final String DEF_NODES_TBL = "tig_nodes"; /** Field description */ public static final String DEF_PAIRS_TBL = "tig_pairs"; /** Field description */ public static final String DEF_MAXIDS_TBL = "tig_max_ids"; /** Field description */ public static final String DEF_ROOT_NODE = "root"; private static final String USER_STR = "User: "; private static final String GET_USER_DB_UID_QUERY = "{ call TigGetUserDBUid(?) }"; private static final String GET_USERS_COUNT_QUERY = "{ call TigAllUsersCount() }"; private static final String DEF_GET_USERS_QUERY = "{ call TigAllUsers() }"; private static final String PGSQL_GET_USERS_QUERY = "select TigAllUsers()"; private static final String ADD_USER_PLAIN_PW_QUERY = "{ call TigAddUserPlainPw(?, ?) }"; private static final String REMOVE_USER_QUERY = "{ call TigRemoveUser(?) }"; private static final String ADD_NODE_QUERY = "{ call TigAddNode(?, ?, ?) }"; private static final String COUNT_USERS_FOR_DOMAIN_QUERY = "select count(*) from tig_users where user_id like ?"; private static final String DATA_FOR_NODE_QUERY = "select pval from " + DEF_PAIRS_TBL + " where (nid = ?) 
AND (pkey = ?)"; private static final String KEYS_FOR_NODE_QUERY = "select pkey from " + DEF_PAIRS_TBL + " where (nid = ?)"; private static final String NODES_FOR_NODE_QUERY = "select nid, node from " + DEF_NODES_TBL + " where parent_nid = ?"; private static final String INSERT_KEY_VAL_QUERY = "insert into " + DEF_PAIRS_TBL + " (nid, uid, pkey, pval) " + " values (?, ?, ?, ?)"; private static final String REMOVE_KEY_DATA_QUERY = "delete from " + DEF_PAIRS_TBL + " where (nid = ?) AND (pkey = ?)"; private static final String UPDATE_PAIRS_QUERY = "{ call TigUpdatePairs(?, ?, ?, ?) }"; public static final String CURRENT_DB_SCHEMA_VER = "5.1"; public static final String SCHEMA_UPGRADE_LINK = "http://www.tigase.org/content/mysql-database-schema-upgrade-tigase-51"; /** Field description */ public static final String DERBY_GETSCHEMAVER_QUERY = "values TigGetDBProperty('schema-version')"; /** Field description */ public static final String JDBC_GETSCHEMAVER_QUERY = "select TigGetDBProperty('schema-version')"; // ~--- fields --------------------------------------------------------------- private AuthRepository auth = null; // Cache moved to connection pool private Map<String, Object> cache = null; private DataRepository data_repo = null; private String get_users_query = null; private boolean derby_mode = false; private boolean autoCreateUser = false; // ~--- methods -------------------------------------------------------------- private void addDataList(DataRepository repo, BareJID user_id, final String subnode, final String key, final String[] list) throws UserNotFoundException, SQLException, UserNotFoundException { long uid = -2; long nid = -2; try { // OK uid = getUserUID(repo, user_id, autoCreateUser); // OK nid = getNodeNID(repo, uid, subnode); if ( log.isLoggable( Level.FINEST ) ){ log.log( Level.FINEST, "Saving data adding data list, user_id: {0}, subnode: {1}, key: {2}, uid: {3}, nid: {4}, list: {5}", new Object[] { user_id, subnode, key, uid, nid, Arrays.toString( 
list ) } ); } if (nid < 0) { try { // OK nid = createNodePath(repo, user_id, subnode); } catch (SQLException e) { // This may happen in cluster node, when 2 nodes at the same // time write data to the same location, like offline messages.... // Let's try to get the nid again. // OK nid = getNodeNID(repo, uid, subnode); } } PreparedStatement insert_key_val_st = null; if (repo == null) { insert_key_val_st = data_repo.getPreparedStatement(user_id, INSERT_KEY_VAL_QUERY); } else { insert_key_val_st = repo.getPreparedStatement(user_id, INSERT_KEY_VAL_QUERY); } synchronized (insert_key_val_st) { insert_key_val_st.setLong(1, nid); insert_key_val_st.setLong(2, uid); insert_key_val_st.setString(3, key); for (String val : list) { insert_key_val_st.setString(4, val); insert_key_val_st.executeUpdate(); } // end of for (String val: list) } } catch (SQLException e) { log.log(Level.WARNING, "Error adding data list, user_id: " + user_id + ", subnode: " + subnode + ", key: " + key + ", uid: " + uid + ", nid: " + nid + ", list: " + Arrays.toString(list), e); throw e; } // cache.put(user_id+"/"+subnode+"/"+key, list); } /** * Describe <code>addDataList</code> method here. * * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @param list * a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void addDataList(BareJID user_id, final String subnode, final String key, final String[] list) throws UserNotFoundException, TigaseDBException { try { addDataList(null, user_id, subnode, key, list); } catch (SQLException ex) { throw new TigaseDBException("Problem adding data list to repository", ex); } } /** * Describe <code>addUser</code> method here. 
* * @param user_id * a <code>String</code> value * @exception UserExistsException * if an error occurs * @throws TigaseDBException */ @Override public void addUser(BareJID user_id) throws UserExistsException, TigaseDBException { try { addUserRepo(null, user_id); } catch (SQLException e) { throw new UserExistsException("Error adding user to repository: ", e); } } /** * Describe <code>addUser</code> method here. * * @param user * a <code>String</code> value * @param password * a <code>String</code> value * @exception UserExistsException * if an error occurs * @exception TigaseDBException * if an error occurs */ @Override public void addUser(BareJID user, final String password) throws UserExistsException, TigaseDBException { auth.addUser(user, password); } /** * Describe <code>digestAuth</code> method here. * * @param user * a <code>String</code> value * @param digest * a <code>String</code> value * @param id * a <code>String</code> value * @param alg * a <code>String</code> value * @return a <code>boolean</code> value * * @throws AuthorizationException * @exception UserNotFoundException * if an error occurs * @exception TigaseDBException * if an error occurs */ @Override @Deprecated public boolean digestAuth(BareJID user, final String digest, final String id, final String alg) throws UserNotFoundException, TigaseDBException, AuthorizationException { return auth.digestAuth(user, digest, id, alg); } // ~--- get methods ---------------------------------------------------------- /** * Describe <code>getData</code> method here. 
 *
 * @param user_id
 *          a <code>String</code> value
 * @param subnode
 *          a <code>String</code> value, node path, {@code null} means the root node
 * @param key
 *          a <code>String</code> value
 * @param def
 *          default value returned when the node or key does not exist
 * @return a <code>String</code> value, the stored value or {@code def}
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String getData(BareJID user_id, final String subnode, final String key,
		final String def) throws UserNotFoundException, TigaseDBException {

	// String[] cache_res = (String[])cache.get(user_id+"/"+subnode+"/"+key);
	// if (cache_res != null) {
	// return cache_res[0];
	// }    // end of if (result != null)
	ResultSet rs = null;

	try {
		long nid = getNodeNID(null, user_id, subnode);

		if (log.isLoggable(Level.FINEST)) {
			log.log(Level.FINEST,
					"Loading data for key: {0}, user: {1}, node: {2}, def: {3}, found nid: {4}",
					new Object[] { key, user_id, subnode, def, nid });
		}

		PreparedStatement data_for_node_st = data_repo.getPreparedStatement(user_id,
				DATA_FOR_NODE_QUERY);

		// Shared prepared statement - must not be used concurrently.
		synchronized (data_for_node_st) {
			if (nid > 0) {
				String result = def;

				data_for_node_st.setLong(1, nid);
				data_for_node_st.setString(2, key);
				rs = data_for_node_st.executeQuery();

				if (rs.next()) {
					result = rs.getString(1);

					if (log.isLoggable(Level.FINEST)) {
						log.log(Level.FINEST, "Found data: {0}", result);
					}
				}

				// cache.put(user_id+"/"+subnode+"/"+key, new String[] {result});
				return result;
			} else {

				// No such node - nothing can be stored under it.
				return def;
			}    // end of if (nid > 0) else
		}
	} catch (SQLException e) {
		throw new TigaseDBException("Error getting user data for: " + user_id + "/" + subnode
				+ "/" + key, e);
	} finally {
		data_repo.release(null, rs);
	}
}

/**
 * Returns a single value stored under the given key, {@code null} when
 * nothing is stored.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param subnode
 *          a <code>String</code> value
 * @param key
 *          a <code>String</code> value
 * @return a <code>String</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String getData(BareJID user_id, final String subnode, final String key)
		throws UserNotFoundException, TigaseDBException {
	return getData(user_id, subnode, key, null);
}

/**
 * Returns a single value stored under the given key in the user's root node.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param key
 *          a <code>String</code> value
 * @return a <code>String</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String getData(BareJID user_id, final String key) throws UserNotFoundException,
		TigaseDBException {
	return getData(user_id, null, key, null);
}

/**
 * Returns all values stored under the given key, {@code null} when the node
 * does not exist or no rows match.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param subnode
 *          a <code>String</code> value
 * @param key
 *          a <code>String</code> value
 * @return a <code>String[]</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String[] getDataList(BareJID user_id, final String subnode, final String key)
		throws UserNotFoundException, TigaseDBException {

	// String[] cache_res = (String[])cache.get(user_id+"/"+subnode+"/"+key);
	// if (cache_res != null) {
	// return cache_res;
	// }    // end of if (result != null)
	ResultSet rs = null;

	try {
		long nid = getNodeNID(null, user_id, subnode);

		if (log.isLoggable(Level.FINEST)) {
			log.log(Level.FINEST,
					"Loading data for key: {0}, user: {1}, node: {2}, found nid: {3}",
					new Object[] { key, user_id, subnode, nid });
		}

		PreparedStatement data_for_node_st = data_repo.getPreparedStatement(user_id,
				DATA_FOR_NODE_QUERY);

		// Shared prepared statement - must not be used concurrently.
		synchronized (data_for_node_st) {
			if (nid > 0) {
				List<String> results = new ArrayList<String>();

				data_for_node_st.setLong(1, nid);
				data_for_node_st.setString(2, key);
				rs = data_for_node_st.executeQuery();

				while (rs.next()) {
					results.add(rs.getString(1));

					if (log.isLoggable(Level.FINEST)) {
						log.log(Level.FINEST, "Found data: {0}", rs.getString(1));
					}
				}

				String[] result = (results.size() == 0) ? null
						: results.toArray(new String[results.size()]);

				// cache.put(user_id+"/"+subnode+"/"+key, result);
				return result;
			} else {
				return null;
			}    // end of if (nid > 0) else
		}
	} catch (SQLException e) {
		throw new TigaseDBException("Error getting data list for: " + user_id + "/" + subnode
				+ "/" + key, e);
	} finally {
		data_repo.release(null, rs);
	}
}

/**
 * Returns all keys stored directly under the given subnode, {@code null}
 * when the node does not exist or holds no keys.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param subnode
 *          a <code>String</code> value
 * @return a <code>String[]</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String[] getKeys(BareJID user_id, final String subnode)
		throws UserNotFoundException, TigaseDBException {
	ResultSet rs = null;

	try {
		long nid = getNodeNID(null, user_id, subnode);

		if (nid > 0) {
			List<String> results = new ArrayList<String>();
			PreparedStatement keys_for_node_st = data_repo.getPreparedStatement(user_id,
					KEYS_FOR_NODE_QUERY);

			// Shared prepared statement - must not be used concurrently.
			synchronized (keys_for_node_st) {
				keys_for_node_st.setLong(1, nid);
				rs = keys_for_node_st.executeQuery();

				while (rs.next()) {
					results.add(rs.getString(1));
				}

				return (results.size() == 0) ? null : results.toArray(new String[results.size()]);
			}
		} else {
			return null;
		}    // end of if (nid > 0) else
	} catch (SQLException e) {
		throw new TigaseDBException("Error getting subnodes list.", e);
	} finally {
		data_repo.release(null, rs);
	}
}

/**
 * Returns all keys stored in the user's root node.
 *
 * @param user_id
 *          a <code>String</code> value
 * @return a <code>String[]</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String[] getKeys(BareJID user_id) throws UserNotFoundException, TigaseDBException {
	return getKeys(user_id, null);
}

/**
 * Returns the JDBC connection string this repository was initialized with.
 *
 * @return the repository resource URI
 */
@Override
public String getResourceUri() {
	return data_repo.getResourceUri();
}

/**
 * Returns names of all direct child nodes of the given subnode,
 * {@code null} when the node does not exist or has no children.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param subnode
 *          a <code>String</code> value
 * @return a <code>String[]</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String[] getSubnodes(BareJID user_id, final String subnode)
		throws UserNotFoundException, TigaseDBException {
	ResultSet rs = null;

	try {
		long nid = getNodeNID(null, user_id, subnode);
		PreparedStatement nodes_for_node_st = data_repo.getPreparedStatement(user_id,
				NODES_FOR_NODE_QUERY);

		// Shared prepared statement - must not be used concurrently.
		synchronized (nodes_for_node_st) {
			if (nid > 0) {
				List<String> results = new ArrayList<String>();

				nodes_for_node_st.setLong(1, nid);
				rs = nodes_for_node_st.executeQuery();

				while (rs.next()) {

					// Second column carries the node name here.
					results.add(rs.getString(2));
				}

				return (results.size() == 0) ? null : results.toArray(new String[results.size()]);
			} else {
				return null;
			}    // end of if (nid > 0) else
		}
	} catch (SQLException e) {
		throw new TigaseDBException("Error getting subnodes list.", e);
	} finally {
		data_repo.release(null, rs);
	}
}

/**
 * Returns names of all direct child nodes of the user's root node.
 *
 * @param user_id
 *          a <code>String</code> value
 * @return a <code>String[]</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public String[] getSubnodes(BareJID user_id) throws UserNotFoundException,
		TigaseDBException {
	return getSubnodes(user_id, null);
}

/**
 * Returns the database UID for the given user JID, consulting the local
 * cache first.
 *
 * @param user_id
 *
 * @return the user's database UID, {@code -1} when the user is not found
 *
 * @throws TigaseDBException
 */
@Override
public long getUserUID(BareJID user_id) throws TigaseDBException {
	Long cache_res = (Long) cache.get(user_id.toString());

	if (cache_res != null) {
		return cache_res.longValue();
	}    // end of if (result != null)

	long result = -1;

	try {
		result = getUserUID(null, user_id);
	} catch (SQLException e) {
		throw new TigaseDBException("Error retrieving user UID from repository: ", e);
	}

	// NOTE(review): a -1 (not found) result is cached as well; presumably
	// addUserRepo overwrites the entry once the user is created - verify.
	cache.put(user_id.toString(), Long.valueOf(result));

	return result;
}

// Looks up the user's UID directly in the database, bypassing the cache.
// Returns -1 when no matching row exists.
public long getUserUID(DataRepository repo, BareJID user_id) throws SQLException {
	ResultSet rs = null;
	long result = -1;

	try {
		PreparedStatement uid_sp = null;

		// Use the explicitly passed repo handle when available.
		if (repo == null) {
			uid_sp = data_repo.getPreparedStatement(user_id, GET_USER_DB_UID_QUERY);
		} else {
			uid_sp = repo.getPreparedStatement(user_id, GET_USER_DB_UID_QUERY);
		}

		synchronized (uid_sp) {
			uid_sp.setString(1, user_id.toString());
			rs = uid_sp.executeQuery();

			if (rs.next()) {
				result = rs.getLong(1);
			} else {
				result = -1;
			}
		}
	} finally {
		data_repo.release(null, rs);
	}

	return result;
}

/**
 * <code>getUsers</code> method is thread safe.
 *
 * @return a <code>List</code> of user IDs from database.
 *
 * @throws TigaseDBException
 */
@Override
public List<BareJID> getUsers() throws TigaseDBException {
	ResultSet rs = null;
	List<BareJID> users = null;

	try {
		PreparedStatement all_users_sp = data_repo.getPreparedStatement(null, get_users_query);

		synchronized (all_users_sp) {

			// Load all user ids from database
			rs = all_users_sp.executeQuery();
			users = new ArrayList<BareJID>(1000);

			while (rs.next()) {
				users.add(BareJID.bareJIDInstanceNS(rs.getString(1)));
			}    // end of while (rs.next())
		}
	} catch (SQLException e) {
		throw new TigaseDBException("Problem loading user list from repository", e);
	} finally {
		data_repo.release(null, rs);
		rs = null;
	}

	return users;
}

/**
 * <code>getUsersCount</code> method is thread safe. It uses local variable
 * for storing <code>Statement</code>.
 *
 * @return a <code>long</code> number of user accounts in database,
 *         {@code -1} when the count could not be read.
 */
@Override
public long getUsersCount() {
	ResultSet rs = null;

	try {
		long users = -1;
		PreparedStatement users_count_sp = data_repo.getPreparedStatement(null,
				GET_USERS_COUNT_QUERY);

		synchronized (users_count_sp) {

			// Load all user count from database
			rs = users_count_sp.executeQuery();

			if (rs.next()) {
				users = rs.getLong(1);
			}    // end of while (rs.next())
		}

		return users;
	} catch (SQLException e) {

		// Deliberately swallowed - callers treat -1 as "unknown".
		return -1;

		// throw new
		// TigaseDBException("Problem loading user list from repository", e);
	} finally {
		data_repo.release(null, rs);
		rs = null;
	}
}

/**
 * Counts user accounts whose JID ends with the given domain.
 *
 * @param domain
 *
 * @return number of matching accounts, {@code -1} when the count could not
 *         be read.
 */
@Override
public long getUsersCount(String domain) {
	ResultSet rs = null;

	try {
		long users = -1;
		PreparedStatement users_domain_count_st = data_repo.getPreparedStatement(null,
				COUNT_USERS_FOR_DOMAIN_QUERY);

		synchronized (users_domain_count_st) {

			// Load all user count from database; LIKE pattern matches any
			// localpart at the given domain.
			users_domain_count_st.setString(1, "%@" + domain);
			rs = users_domain_count_st.executeQuery();

			if (rs.next()) {
				users = rs.getLong(1);
			}    // end of while (rs.next())
		}

		return users;
	} catch (SQLException e) {

		// Deliberately swallowed - callers treat -1 as "unknown".
		return -1;

		// throw new
		// TigaseDBException("Problem loading user list from repository", e);
	} finally {
		data_repo.release(null, rs);
		rs = null;
	}
}

// ~--- methods --------------------------------------------------------------

/**
 * Opens the database connection, validates the schema version and prepares
 * all SQL statements used by this repository.
 *
 * @param connection_str
 *          a <code>String</code> value, the JDBC connection URI
 * @param params
 *          extra initialization parameters
 *
 * @throws DBInitException
 */
@Override
public void initRepository(final String connection_str, Map<String, String> params)
		throws DBInitException {
	try {
		derby_mode = connection_str.startsWith("jdbc:derby");
		data_repo = RepositoryFactory.getDataRepository(null, connection_str, params);
		checkDBSchema();

		if (connection_str.contains("autoCreateUser=true")) {
			autoCreateUser = true;
		}    // end of if (db_conn.contains())

		if (connection_str.contains("cacheRepo=off")) {
			log.fine("Disabling cache.");

			// Zero-size cache effectively disables caching.
			cache = Collections.synchronizedMap(new RepoCache(0, -1000));
		} else {
			cache = Collections.synchronizedMap(new RepoCache(10000, 60 * 1000));
		}

		data_repo.initPreparedStatement(GET_USER_DB_UID_QUERY, GET_USER_DB_UID_QUERY);
		data_repo.initPreparedStatement(GET_USERS_COUNT_QUERY, GET_USERS_COUNT_QUERY);

		// PostgreSQL needs its own users listing query.
		if (connection_str.startsWith("jdbc:postgresql")) {
			get_users_query = PGSQL_GET_USERS_QUERY;
		} else {
			get_users_query = DEF_GET_USERS_QUERY;
		}

		data_repo.initPreparedStatement(get_users_query, get_users_query);
		data_repo.initPreparedStatement(ADD_USER_PLAIN_PW_QUERY, ADD_USER_PLAIN_PW_QUERY);
		data_repo.initPreparedStatement(REMOVE_USER_QUERY, REMOVE_USER_QUERY);
		data_repo.initPreparedStatement(ADD_NODE_QUERY, ADD_NODE_QUERY);
		data_repo.initPreparedStatement(COUNT_USERS_FOR_DOMAIN_QUERY,
				COUNT_USERS_FOR_DOMAIN_QUERY);
		data_repo.initPreparedStatement(DATA_FOR_NODE_QUERY, DATA_FOR_NODE_QUERY);
		data_repo.initPreparedStatement(KEYS_FOR_NODE_QUERY, KEYS_FOR_NODE_QUERY);
		data_repo.initPreparedStatement(NODES_FOR_NODE_QUERY, NODES_FOR_NODE_QUERY);
		data_repo.initPreparedStatement(INSERT_KEY_VAL_QUERY, INSERT_KEY_VAL_QUERY);
		data_repo.initPreparedStatement(REMOVE_KEY_DATA_QUERY, REMOVE_KEY_DATA_QUERY);
		data_repo.initPreparedStatement(UPDATE_PAIRS_QUERY, UPDATE_PAIRS_QUERY);
		auth = new AuthRepositoryImpl(this);

		// initRepo();
		log.log(Level.INFO, "Initialized database connection: {0}", connection_str);
	} catch (Exception e) {

		// Leave the repository unusable rather than half-initialized.
		data_repo = null;

		throw new DBInitException(
				"Problem initializing jdbc connection: " + connection_str, e);
	}
}

/**
 * Performs user logout bookkeeping. Delegates to the authentication layer.
 *
 * @param user
 *
 * @throws TigaseDBException
 * @throws UserNotFoundException
 */
@Override
public void logout(BareJID user) throws UserNotFoundException, TigaseDBException {
	auth.logout(user);
}

/**
 * Performs SASL or other non-PLAIN authentication. Delegates to the
 * authentication layer.
 *
 * @param props
 *          a <code>Map</code> value
 * @return a <code>boolean</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @exception TigaseDBException
 *              if an error occurs
 * @exception AuthorizationException
 *              if an error occurs
 */
@Override
public boolean otherAuth(final Map<String, Object> props) throws UserNotFoundException,
		TigaseDBException, AuthorizationException {
	return auth.otherAuth(props);
}

// Implementation of tigase.db.AuthRepository

/**
 * Performs PLAIN password authentication. Delegates to the authentication
 * layer.
 *
 * @param user
 *          a <code>String</code> value
 * @param password
 *          a <code>String</code> value
 * @return a <code>boolean</code> value
 *
 * @throws AuthorizationException
 * @exception UserNotFoundException
 *              if an error occurs
 * @exception TigaseDBException
 *              if an error occurs
 */
@Override
@Deprecated
public boolean plainAuth(BareJID user, final String password)
		throws UserNotFoundException, TigaseDBException, AuthorizationException {
	return auth.plainAuth(user, password);
}

/**
 * Fills the given map with supported authentication properties. Delegates
 * to the authentication layer.
 *
 * @param authProps
 */
@Override
public void queryAuth(Map<String, Object> authProps) {
	auth.queryAuth(authProps);
}

/**
 * Removes the value stored under the given key in the given subnode.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param subnode
 *          a <code>String</code> value
 * @param key
 *          a <code>String</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public void removeData(BareJID user_id, final String subnode, final String key)
		throws UserNotFoundException, TigaseDBException {
	removeData(null, user_id, subnode, key);
}

// Private variant allowing the caller to pin the operation to a specific
// repo handle (used by setDataList to keep remove+add on one connection).
private void removeData(DataRepository repo, BareJID user_id, final String subnode,
		final String key) throws UserNotFoundException, TigaseDBException {

	// cache.remove(user_id+"/"+subnode+"/"+key);
	try {
		long nid = getNodeNID(repo, user_id, subnode);
		PreparedStatement remove_key_data_st = null;

		// Use the explicitly passed repo handle when available.
		if (repo == null) {
			remove_key_data_st = data_repo.getPreparedStatement(user_id, REMOVE_KEY_DATA_QUERY);
		} else {
			remove_key_data_st = repo.getPreparedStatement(user_id, REMOVE_KEY_DATA_QUERY);
		}

		// Shared prepared statement - must not be used concurrently.
		synchronized (remove_key_data_st) {
			if (nid > 0) {
				remove_key_data_st.setLong(1, nid);
				remove_key_data_st.setString(2, key);
				remove_key_data_st.executeUpdate();
			}
		}
	} catch (SQLException e) {
		throw new TigaseDBException("Error getting subnodes list.", e);
	}
}

/**
 * Removes the value stored under the given key in the user's root node.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param key
 *          a <code>String</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public void removeData(BareJID user_id, final String key) throws UserNotFoundException,
		TigaseDBException {
	removeData(user_id, null, key);
}

/**
 * Removes the given subnode together with all its data.
* * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void removeSubnode(BareJID user_id, final String subnode) throws UserNotFoundException, TigaseDBException { if (subnode == null) { return; } // end of if (subnode == null) try { long nid = getNodeNID(null, user_id, subnode); if (nid > 0) { deleteSubnode(null, nid); cache.remove(user_id + "/" + subnode); } } catch (SQLException e) { throw new TigaseDBException("Error getting subnodes list.", e); } } /** * <code>removeUser</code> method is thread safe. It uses local variable for * storing <code>Statement</code>. * * @param user_id * a <code>String</code> value the user Jabber ID. * * @throws TigaseDBException * @exception UserNotFoundException * if an error occurs */ @Override public void removeUser(BareJID user_id) throws UserNotFoundException, TigaseDBException { Statement stmt = null; ResultSet rs = null; String query = null; if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "Removing user: {0}", user_id); } try { stmt = data_repo.createStatement(user_id); // Get user account uid long uid = getUserUID(null, user_id, autoCreateUser); // Remove all user enrties from pairs table query = "delete from " + DEF_PAIRS_TBL + " where uid = " + uid; stmt.executeUpdate(query); // Remove all user entries from nodes table query = "delete from " + DEF_NODES_TBL + " where uid = " + uid; stmt.executeUpdate(query); PreparedStatement user_del_sp = data_repo.getPreparedStatement(user_id, REMOVE_USER_QUERY); // Remove user account from users table synchronized (user_del_sp) { user_del_sp.setString(1, user_id.toString()); user_del_sp.executeUpdate(); } } catch (SQLException e) { throw new TigaseDBException("Error removing user from repository: " + query, e); } finally { data_repo.release(stmt, rs); stmt = null; cache.remove(user_id.toString()); // cache.clear(); } } // ~--- set 
methods ---------------------------------------------------------- /** * Describe <code>setData</code> method here. * * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @param value * a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void setData(BareJID user_id, final String subnode, final String key, final String value) throws UserNotFoundException, TigaseDBException { long uid = -2; long nid = -2; DataRepository repo = data_repo.takeRepoHandle(user_id); synchronized (repo) { try { uid = getUserUID(repo, user_id, autoCreateUser); nid = getNodeNID(repo, uid, subnode); if ( log.isLoggable( Level.FINEST ) ){ log.log( Level.FINEST, "Saving data setting data, user_id: {0}, subnode: {1}, key: {2}, uid: {3}, nid: {4}, value: {5}", new Object[] { user_id, subnode, key, uid, nid, value } ); } if (nid < 0) { try { // OK nid = createNodePath(repo, user_id, subnode); } catch (SQLException e) { // This may happen in cluster node, when 2 nodes at the same // time write data to the same location, like offline messages.... // Let's try to get the nid again. // OK nid = getNodeNID(repo, uid, subnode); } } PreparedStatement update_pairs_sp = repo.getPreparedStatement(user_id, UPDATE_PAIRS_QUERY); update_pairs_sp.setLong(1, nid); update_pairs_sp.setLong(2, uid); update_pairs_sp.setString(3, key); update_pairs_sp.setString(4, value); update_pairs_sp.executeUpdate(); } catch (SQLException e) { log.log(Level.WARNING, "Error setting data , user_id: " + user_id + ", subnode: " + subnode + ", key: " + key + ", uid: " + uid + ", nid: " + nid + ", value: " + value, e); } } } /** * Describe <code>setData</code> method here. 
 *
 * @param user_id
 *          a <code>String</code> value
 * @param key
 *          a <code>String</code> value
 * @param value
 *          a <code>String</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public void setData(BareJID user_id, final String key, final String value)
		throws UserNotFoundException, TigaseDBException {
	setData(user_id, null, key, value);
}

/**
 * Replaces all values stored under the given key with the supplied list.
 * Implemented as a remove followed by an add on the same connection handle.
 *
 * @param user_id
 *          a <code>String</code> value
 * @param subnode
 *          a <code>String</code> value
 * @param key
 *          a <code>String</code> value
 * @param list
 *          a <code>String[]</code> value
 * @exception UserNotFoundException
 *              if an error occurs
 * @throws TigaseDBException
 */
@Override
public void setDataList(BareJID user_id, final String subnode, final String key,
		final String[] list) throws UserNotFoundException, TigaseDBException {

	// Transactions may not yet work properly but at least let's make sure
	// both calls below are executed exclusively on the same DB connection
	DataRepository repo = data_repo.takeRepoHandle(user_id);

	synchronized (repo) {
		try {
			removeData(repo, user_id, subnode, key);
			try {
				addDataList(repo, user_id, subnode, key, list);
			} catch (SQLException ex) {
				throw new TigaseDBException("Problem adding data to DB, user_id: " + user_id
						+ ", subnode: " + subnode + ", key: " + key + ", list: "
						+ Arrays.toString(list), ex);
			}
		} finally {
			data_repo.releaseRepoHandle(repo);
		}
	}

	// Earlier transactional implementation, kept for reference:
	// int counter = 0;
	// boolean success = false;
	// DataRepository repo = data_repo.takeRepoHandle();
	// try {
	// while (!success && ++counter < 4) {
	// try {
	// repo.startTransaction();
	// removeData(repo, user_id, subnode, key);
	// addDataList(repo, user_id, subnode, key, list);
	// repo.commit();
	// repo.endTransaction();
	// success = true;
	// } catch (SQLException sqlex) {
	// try {
	// repo.rollback();
	// repo.endTransaction();
	// } catch (SQLException e) {
	// log.log(Level.WARNING, "Problem rolling-back transaction: ", e);
	// }
	// try {
	// Thread.sleep(10);
	// } catch (InterruptedException ex) {
	// }
	// }
	// }
	// } finally {
	// data_repo.releaseRepoHandle(repo);
	// }
	// if (!success) {
	// log.log(Level.WARNING,
	// "Unsuccessful dataList set, user_id: " + user_id + ", subnode: " + subnode
	// + ", key: " + key + ", list: " + Arrays.toString(list));
	// }
}

/**
 * Changes the user's password. Delegates to the authentication layer.
 *
 * @param user
 * @param password
 *
 * @throws TigaseDBException
 */
@Override
public void updatePassword(BareJID user, final String password) throws TigaseDBException {
	auth.updatePassword(user, password);
}

/**
 * Checks whether the given user account exists in the repository.
 *
 * @param user
 *
 * @return {@code true} when the account exists; note a database error also
 *         yields {@code false} here.
 */
@Override
public boolean userExists(BareJID user) {
	try {

		// Throws UserNotFoundException when the account is missing; the
		// autoCreate flag is off so nothing is created as a side effect.
		getUserUID(null, user, false);

		return true;
	} catch (Exception e) {
		return false;
	}
}

// Inserts a new node row under the given parent and returns its NID, or -1
// when the database did not return a generated id.
private long addNode(DataRepository repo, long uid, long parent_nid, String node_name)
		throws SQLException {
	ResultSet rs = null;
	PreparedStatement node_add_sp = null;

	// Use the explicitly passed repo handle when available.
	if (repo == null) {
		node_add_sp = data_repo.getPreparedStatement(null, ADD_NODE_QUERY);
	} else {
		node_add_sp = repo.getPreparedStatement(null, ADD_NODE_QUERY);
	}

	synchronized (node_add_sp) {
		try {
			if (parent_nid < 0) {

				// Root level node - no parent reference.
				node_add_sp.setNull(1, Types.BIGINT);
			} else {
				node_add_sp.setLong(1, parent_nid);
			}    // end of else

			node_add_sp.setLong(2, uid);
			node_add_sp.setString(3, node_name);
			rs = node_add_sp.executeQuery();

			if (rs.next()) {
				return rs.getLong(1);
			} else {
				log.warning("Missing NID after adding new node...");

				return -1;

				// throw new TigaseDBException("Propeblem adding new node. "
				// + "The SP should return nid or fail");
			}    // end of if (isnext) else
		} finally {
			data_repo.release(null, rs);
		}
	}

	// return new_nid;
}

/**
 * <code>addUserRepo</code> method is thread safe. It uses local variable for
 * storing <code>Statement</code>.
 *
 * @param user_id
 *          a <code>String</code> value of the user ID.
 * @return a <code>long</code> value of <code>uid</code> database user ID.
 * @exception SQLException
 *              if an error occurs
 */
private long addUserRepo(DataRepository repo, BareJID user_id) throws SQLException {
	ResultSet rs = null;
	long uid = -1;
	PreparedStatement user_add_sp = null;

	// Use the explicitly passed repo handle when available.
	if (repo == null) {
		user_add_sp = data_repo.getPreparedStatement(user_id, ADD_USER_PLAIN_PW_QUERY);
	} else {
		user_add_sp = repo.getPreparedStatement(user_id, ADD_USER_PLAIN_PW_QUERY);
	}

	synchronized (user_add_sp) {
		try {
			user_add_sp.setString(1, user_id.toString());

			// No password on plain account creation.
			user_add_sp.setNull(2, Types.VARCHAR);
			rs = user_add_sp.executeQuery();

			if (rs.next()) {
				uid = rs.getLong(1);

				// addNode(uid, -1, root_node);
			} else {
				log.warning("Missing UID after adding new user...");

				// throw new
				// TigaseDBException("Propeblem adding new user to repository. "
				// + "The SP should return uid or fail");
			}    // end of if (isnext) else
		} finally {
			data_repo.release(null, rs);
		}
	}

	// Cache the freshly assigned uid (may be -1 when the SP returned nothing).
	cache.put(user_id.toString(), Long.valueOf(uid));

	return uid;
}

// Builds a nested SELECT resolving a "/a/b/c" node path to its NID for the
// given uid. Node names come from repository keys, not end-user input -
// NOTE(review): they are concatenated into SQL unescaped; confirm callers
// never pass attacker-controlled node names.
private String buildNodeQuery(long uid, String node_path) {
	String query = "select nid as nid1 from " + DEF_NODES_TBL + " where (uid = " + uid + ")"
			+ " AND (parent_nid is null)" + " AND (node = '" + DEF_ROOT_NODE + "')";

	if (node_path == null) {
		return query;
	} else {
		StringTokenizer strtok = new StringTokenizer(node_path, "/", false);
		int cnt = 1;
		String subquery = query;

		// Wrap one more SELECT around the query for every path element.
		while (strtok.hasMoreTokens()) {
			String token = strtok.nextToken();

			++cnt;
			subquery = "select nid as nid" + cnt + ", node as node" + cnt + " from "
					+ DEF_NODES_TBL + ", (" + subquery + ") nodes" + (cnt - 1)
					+ " where (parent_nid = nid" + (cnt - 1) + ")" + " AND (node = '" + token
					+ "')";
		}    // end of while (strtok.hasMoreTokens())

		return subquery;
	}    // end of else
}

// Implementation of tigase.db.UserRepository

// Verifies the database schema version matches CURRENT_DB_SCHEMA_VER and
// terminates the process with exit code 100 when it does not.
private void checkDBSchema() throws SQLException {
	String schema_version = "1.0";
	String query = (derby_mode ? DERBY_GETSCHEMAVER_QUERY : JDBC_GETSCHEMAVER_QUERY);
	Statement stmt = data_repo.createStatement(null);
	ResultSet rs = stmt.executeQuery(query);

	try {
		if (rs.next()) {
			schema_version = rs.getString(1);

			if (false == CURRENT_DB_SCHEMA_VER.equals(schema_version)) {
				System.err.println("\n\nPlease upgrade database schema now.");
				System.err.println("Current scheme version is: " + schema_version
						+ ", expected: " + CURRENT_DB_SCHEMA_VER);
				System.err.println("Check the schema upgrade guide at the address:");
				System.err.println(SCHEMA_UPGRADE_LINK);
				System.err.println("----");
				System.err.println("If you have upgraded your schema and you are still");
				System.err.println("experiencing this problem please contact support at");
				System.err.println("e-mail address: [email protected]");

				// e.printStackTrace();
				System.exit(100);
			}
		}
	} finally {
		data_repo.release(stmt, rs);
	}    // end of try-catch
}

// Creates every missing node along the given "/a/b/c" path and returns the
// NID of the deepest node.
private long createNodePath(DataRepository repo, BareJID user_id, String node_path)
		throws SQLException, UserNotFoundException {
	if (node_path == null) {

		// Or should I throw NullPointerException?
// OK return getNodeNID(repo, user_id, null); } // end of if (node_path == null) // OK long uid = getUserUID(repo, user_id, autoCreateUser); // OK long nid = getNodeNID(repo, uid, null); StringTokenizer strtok = new StringTokenizer(node_path, "/", false); StringBuilder built_path = new StringBuilder(); while (strtok.hasMoreTokens()) { String token = strtok.nextToken(); built_path.append("/").append(token); // OK long cur_nid = getNodeNID(repo, uid, built_path.toString()); if (cur_nid > 0) { nid = cur_nid; } else { // OK nid = addNode(repo, uid, nid, token); } // end of if (cur_nid > 0) else } // end of while (strtok.hasMoreTokens()) return nid; } private void deleteSubnode(DataRepository repo, long nid) throws SQLException { Statement stmt = null; ResultSet rs = null; String query = null; try { if (repo == null) { stmt = data_repo.createStatement(null); } else { stmt = repo.createStatement(null); } query = "delete from " + DEF_PAIRS_TBL + " where nid = " + nid; stmt.executeUpdate(query); query = "delete from " + DEF_NODES_TBL + " where nid = " + nid; stmt.executeUpdate(query); } finally { data_repo.release(stmt, rs); } } // ~--- get methods ---------------------------------------------------------- private long getNodeNID(DataRepository repo, long uid, String node_path) throws SQLException, UserNotFoundException { String query = buildNodeQuery(uid, node_path); if (log.isLoggable(Level.FINEST)) { log.finest(query); } Statement stmt = null; ResultSet rs = null; long nid = -1; try { if (repo == null) { stmt = data_repo.createStatement(null); } else { stmt = repo.createStatement(null); } rs = stmt.executeQuery(query); if (rs.next()) { nid = rs.getLong(1); } else { nid = -1; } // end of if (isnext) else if (nid <= 0) { if (node_path == null) { log.info("Missing root node, database upgrade or bug in the code? 
Adding missing " + "root node now."); // OK nid = addNode(repo, uid, -1, "root"); } else { if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "Missing nid for node path: {0} and uid: {1}", new Object[] { node_path, uid }); } } } return nid; } finally { data_repo.release(stmt, rs); stmt = null; rs = null; } } private long getNodeNID(DataRepository repo, BareJID user_id, String node_path) throws SQLException, UserNotFoundException { Long cache_res = (Long) cache.get(user_id + "/" + node_path); if (cache_res != null) { return cache_res.longValue(); } // end of if (result != null) // OK long uid = getUserUID(repo, user_id, autoCreateUser); // OK long result = getNodeNID(repo, uid, node_path); if (result > 0) { cache.put(user_id + "/" + node_path, Long.valueOf(result)); } // end of if (result > 0) return result; } private long getUserUID(DataRepository repo, BareJID user_id, boolean autoCreate) throws SQLException, UserNotFoundException { // OK long result = getUserUID(repo, user_id); if (result <= 0) { if (autoCreate) { // OK result = addUserRepo(repo, user_id); } else { throw new UserNotFoundException("User does not exist: " + user_id); } // end of if (autoCreate) else } // end of if (isnext) else return result; } // ~--- inner classes -------------------------------------------------------- private class RepoCache extends SimpleCache<String, Object> { /** * Constructs ... 
* * * @param maxsize * @param cache_time */ public RepoCache(int maxsize, long cache_time) { super(maxsize, cache_time); } // ~--- methods ------------------------------------------------------------ /** * Method description * * * @param key * * @return */ @Override public Object remove(Object key) { if (cache_off) { return null; } Object val = super.remove(key); String strk = key.toString(); Iterator<String> ks = keySet().iterator(); while (ks.hasNext()) { String k = ks.next().toString(); if (k.startsWith(strk)) { ks.remove(); } // end of if (k.startsWith(strk)) } // end of while (ks.hasNext()) return val; } } } // JDBCRepository // ~ Formatted in Sun Code Convention // ~ Formatted by Jindent --- http://www.jindent.com
src/main/java/tigase/db/jdbc/JDBCRepository.java
/* * Tigase Jabber/XMPP Server * Copyright (C) 2004-2007 "Artur Hefczyc" <[email protected]> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. Look for COPYING file in the top folder. * If not, see http://www.gnu.org/licenses/. * * $Rev$ * Last modified by $Author$ * $Date$ */ package tigase.db.jdbc; //~--- non-JDK imports -------------------------------------------------------- import tigase.db.AuthRepository; import tigase.db.AuthRepositoryImpl; import tigase.db.AuthorizationException; import tigase.db.DBInitException; import tigase.db.DataRepository; import tigase.db.RepositoryFactory; import tigase.db.TigaseDBException; import tigase.db.UserExistsException; import tigase.db.UserNotFoundException; import tigase.db.UserRepository; import tigase.util.SimpleCache; import tigase.xmpp.BareJID; //~--- JDK imports ------------------------------------------------------------ import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.logging.Level; import java.util.logging.Logger; //~--- classes ---------------------------------------------------------------- /** * Not synchronized implementation! Musn't be used by more than one thread at * the same time. 
* <p> * Thanks to Daniele for better unique IDs handling. Created: Thu Oct 26 * 11:48:53 2006 * </p> * * @author <a href="mailto:[email protected]">Artur Hefczyc</a> * @author <a href="mailto:[email protected]">Daniele</a> * @version $Rev$ */ public class JDBCRepository implements AuthRepository, UserRepository { private static final Logger log = Logger.getLogger(JDBCRepository.class.getName()); /** Field description */ public static final String DEF_USERS_TBL = "tig_users"; /** Field description */ public static final String DEF_NODES_TBL = "tig_nodes"; /** Field description */ public static final String DEF_PAIRS_TBL = "tig_pairs"; /** Field description */ public static final String DEF_MAXIDS_TBL = "tig_max_ids"; /** Field description */ public static final String DEF_ROOT_NODE = "root"; private static final String USER_STR = "User: "; private static final String GET_USER_DB_UID_QUERY = "{ call TigGetUserDBUid(?) }"; private static final String GET_USERS_COUNT_QUERY = "{ call TigAllUsersCount() }"; private static final String DEF_GET_USERS_QUERY = "{ call TigAllUsers() }"; private static final String PGSQL_GET_USERS_QUERY = "select TigAllUsers()"; private static final String ADD_USER_PLAIN_PW_QUERY = "{ call TigAddUserPlainPw(?, ?) }"; private static final String REMOVE_USER_QUERY = "{ call TigRemoveUser(?) }"; private static final String ADD_NODE_QUERY = "{ call TigAddNode(?, ?, ?) }"; private static final String COUNT_USERS_FOR_DOMAIN_QUERY = "select count(*) from tig_users where user_id like ?"; private static final String DATA_FOR_NODE_QUERY = "select pval from " + DEF_PAIRS_TBL + " where (nid = ?) 
AND (pkey = ?)"; private static final String KEYS_FOR_NODE_QUERY = "select pkey from " + DEF_PAIRS_TBL + " where (nid = ?)"; private static final String NODES_FOR_NODE_QUERY = "select nid, node from " + DEF_NODES_TBL + " where parent_nid = ?"; private static final String INSERT_KEY_VAL_QUERY = "insert into " + DEF_PAIRS_TBL + " (nid, uid, pkey, pval) " + " values (?, ?, ?, ?)"; private static final String REMOVE_KEY_DATA_QUERY = "delete from " + DEF_PAIRS_TBL + " where (nid = ?) AND (pkey = ?)"; public static final String CURRENT_DB_SCHEMA_VER = "5.1"; public static final String SCHEMA_UPGRADE_LINK = "http://www.tigase.org/content/mysql-database-schema-upgrade-tigase-51"; /** Field description */ public static final String DERBY_GETSCHEMAVER_QUERY = "values TigGetDBProperty('schema-version')"; /** Field description */ public static final String JDBC_GETSCHEMAVER_QUERY = "select TigGetDBProperty('schema-version')"; // ~--- fields --------------------------------------------------------------- private AuthRepository auth = null; // Cache moved to connection pool private Map<String, Object> cache = null; private DataRepository data_repo = null; private String get_users_query = null; private boolean derby_mode = false; private boolean autoCreateUser = false; // ~--- methods -------------------------------------------------------------- private void addDataList(DataRepository repo, BareJID user_id, final String subnode, final String key, final String[] list) throws UserNotFoundException, SQLException, UserNotFoundException { long uid = -2; long nid = -2; try { // OK uid = getUserUID(repo, user_id, autoCreateUser); // OK nid = getNodeNID(repo, uid, subnode); if ( log.isLoggable( Level.FINEST ) ){ log.log( Level.FINEST, "Saving data adding data list, user_id: {0}, subnode: {1}, key: {2}, uid: {3}, nid: {4}, list: {5}", new Object[] { user_id, subnode, key, uid, nid, Arrays.toString( list ) } ); } if (nid < 0) { try { // OK nid = createNodePath(repo, user_id, subnode); } 
catch (SQLException e) { // This may happen in cluster node, when 2 nodes at the same // time write data to the same location, like offline messages.... // Let's try to get the nid again. // OK nid = getNodeNID(repo, uid, subnode); } } PreparedStatement insert_key_val_st = null; if (repo == null) { insert_key_val_st = data_repo.getPreparedStatement(user_id, INSERT_KEY_VAL_QUERY); } else { insert_key_val_st = repo.getPreparedStatement(user_id, INSERT_KEY_VAL_QUERY); } synchronized (insert_key_val_st) { insert_key_val_st.setLong(1, nid); insert_key_val_st.setLong(2, uid); insert_key_val_st.setString(3, key); for (String val : list) { insert_key_val_st.setString(4, val); insert_key_val_st.executeUpdate(); } // end of for (String val: list) } } catch (SQLException e) { log.log(Level.WARNING, "Error adding data list, user_id: " + user_id + ", subnode: " + subnode + ", key: " + key + ", uid: " + uid + ", nid: " + nid + ", list: " + Arrays.toString(list), e); throw e; } // cache.put(user_id+"/"+subnode+"/"+key, list); } /** * Describe <code>addDataList</code> method here. * * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @param list * a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void addDataList(BareJID user_id, final String subnode, final String key, final String[] list) throws UserNotFoundException, TigaseDBException { try { addDataList(null, user_id, subnode, key, list); } catch (SQLException ex) { throw new TigaseDBException("Problem adding data list to repository", ex); } } /** * Describe <code>addUser</code> method here. 
* * @param user_id * a <code>String</code> value * @exception UserExistsException * if an error occurs * @throws TigaseDBException */ @Override public void addUser(BareJID user_id) throws UserExistsException, TigaseDBException { try { addUserRepo(null, user_id); } catch (SQLException e) { throw new UserExistsException("Error adding user to repository: ", e); } } /** * Describe <code>addUser</code> method here. * * @param user * a <code>String</code> value * @param password * a <code>String</code> value * @exception UserExistsException * if an error occurs * @exception TigaseDBException * if an error occurs */ @Override public void addUser(BareJID user, final String password) throws UserExistsException, TigaseDBException { auth.addUser(user, password); } /** * Describe <code>digestAuth</code> method here. * * @param user * a <code>String</code> value * @param digest * a <code>String</code> value * @param id * a <code>String</code> value * @param alg * a <code>String</code> value * @return a <code>boolean</code> value * * @throws AuthorizationException * @exception UserNotFoundException * if an error occurs * @exception TigaseDBException * if an error occurs */ @Override @Deprecated public boolean digestAuth(BareJID user, final String digest, final String id, final String alg) throws UserNotFoundException, TigaseDBException, AuthorizationException { return auth.digestAuth(user, digest, id, alg); } // ~--- get methods ---------------------------------------------------------- /** * Describe <code>getData</code> method here. 
* * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @param def * a <code>String</code> value * @return a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String getData(BareJID user_id, final String subnode, final String key, final String def) throws UserNotFoundException, TigaseDBException { // String[] cache_res = (String[])cache.get(user_id+"/"+subnode+"/"+key); // if (cache_res != null) { // return cache_res[0]; // } // end of if (result != null) ResultSet rs = null; try { long nid = getNodeNID(null, user_id, subnode); if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "Loading data for key: {0}, user: {1}, node: {2}, def: {3}, found nid: {4}", new Object[] { key, user_id, subnode, def, nid }); } PreparedStatement data_for_node_st = data_repo.getPreparedStatement(user_id, DATA_FOR_NODE_QUERY); synchronized (data_for_node_st) { if (nid > 0) { String result = def; data_for_node_st.setLong(1, nid); data_for_node_st.setString(2, key); rs = data_for_node_st.executeQuery(); if (rs.next()) { result = rs.getString(1); if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "Found data: {0}", result); } } // cache.put(user_id+"/"+subnode+"/"+key, new String[] {result}); return result; } else { return def; } // end of if (nid > 0) else } } catch (SQLException e) { throw new TigaseDBException("Error getting user data for: " + user_id + "/" + subnode + "/" + key, e); } finally { data_repo.release(null, rs); } } /** * Describe <code>getData</code> method here. 
* * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @return a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String getData(BareJID user_id, final String subnode, final String key) throws UserNotFoundException, TigaseDBException { return getData(user_id, subnode, key, null); } /** * Describe <code>getData</code> method here. * * @param user_id * a <code>String</code> value * @param key * a <code>String</code> value * @return a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String getData(BareJID user_id, final String key) throws UserNotFoundException, TigaseDBException { return getData(user_id, null, key, null); } /** * Describe <code>getDataList</code> method here. * * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @return a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String[] getDataList(BareJID user_id, final String subnode, final String key) throws UserNotFoundException, TigaseDBException { // String[] cache_res = (String[])cache.get(user_id+"/"+subnode+"/"+key); // if (cache_res != null) { // return cache_res; // } // end of if (result != null) ResultSet rs = null; try { long nid = getNodeNID(null, user_id, subnode); if ( log.isLoggable( Level.FINEST ) ){ log.log( Level.FINEST, "Loading data for key: {0}, user: {1}, node: {2}, found nid: {3}", new Object[] { key, user_id, subnode, nid } ); } PreparedStatement data_for_node_st = data_repo.getPreparedStatement(user_id, DATA_FOR_NODE_QUERY); synchronized (data_for_node_st) { if (nid > 0) { List<String> results = new ArrayList<String>(); data_for_node_st.setLong(1, nid); 
data_for_node_st.setString(2, key); rs = data_for_node_st.executeQuery(); while (rs.next()) { results.add(rs.getString(1)); if ( log.isLoggable( Level.FINEST ) ){ log.log( Level.FINEST, "Found data: {0}", rs.getString(1) ); } } String[] result = (results.size() == 0) ? null : results.toArray(new String[results.size()]); // cache.put(user_id+"/"+subnode+"/"+key, result); return result; } else { return null; } // end of if (nid > 0) else } } catch (SQLException e) { throw new TigaseDBException("Error getting data list for: " + user_id + "/" + subnode + "/" + key, e); } finally { data_repo.release(null, rs); } } /** * Describe <code>getKeys</code> method here. * * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @return a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String[] getKeys(BareJID user_id, final String subnode) throws UserNotFoundException, TigaseDBException { ResultSet rs = null; try { long nid = getNodeNID(null, user_id, subnode); if (nid > 0) { List<String> results = new ArrayList<String>(); PreparedStatement keys_for_node_st = data_repo.getPreparedStatement(user_id, KEYS_FOR_NODE_QUERY); synchronized (keys_for_node_st) { keys_for_node_st.setLong(1, nid); rs = keys_for_node_st.executeQuery(); while (rs.next()) { results.add(rs.getString(1)); } return (results.size() == 0) ? null : results .toArray(new String[results.size()]); } } else { return null; } // end of if (nid > 0) else } catch (SQLException e) { throw new TigaseDBException("Error getting subnodes list.", e); } finally { data_repo.release(null, rs); } } /** * Describe <code>getKeys</code> method here. 
* * @param user_id * a <code>String</code> value * @return a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String[] getKeys(BareJID user_id) throws UserNotFoundException, TigaseDBException { return getKeys(user_id, null); } /** * Method description * * * @return */ @Override public String getResourceUri() { return data_repo.getResourceUri(); } /** * Describe <code>getSubnodes</code> method here. * * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @return a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String[] getSubnodes(BareJID user_id, final String subnode) throws UserNotFoundException, TigaseDBException { ResultSet rs = null; try { long nid = getNodeNID(null, user_id, subnode); PreparedStatement nodes_for_node_st = data_repo.getPreparedStatement(user_id, NODES_FOR_NODE_QUERY); synchronized (nodes_for_node_st) { if (nid > 0) { List<String> results = new ArrayList<String>(); nodes_for_node_st.setLong(1, nid); rs = nodes_for_node_st.executeQuery(); while (rs.next()) { results.add(rs.getString(2)); } return (results.size() == 0) ? null : results .toArray(new String[results.size()]); } else { return null; } // end of if (nid > 0) else } } catch (SQLException e) { throw new TigaseDBException("Error getting subnodes list.", e); } finally { data_repo.release(null, rs); } } /** * Describe <code>getSubnodes</code> method here. 
* * @param user_id * a <code>String</code> value * @return a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public String[] getSubnodes(BareJID user_id) throws UserNotFoundException, TigaseDBException { return getSubnodes(user_id, null); } /** * Method description * * * @param user_id * * @return * * @throws TigaseDBException */ @Override public long getUserUID(BareJID user_id) throws TigaseDBException { Long cache_res = (Long) cache.get(user_id.toString()); if (cache_res != null) { return cache_res.longValue(); } // end of if (result != null) long result = -1; try { result = getUserUID(null, user_id); } catch (SQLException e) { throw new TigaseDBException("Error retrieving user UID from repository: ", e); } cache.put(user_id.toString(), Long.valueOf(result)); return result; } public long getUserUID(DataRepository repo, BareJID user_id) throws SQLException { ResultSet rs = null; long result = -1; try { PreparedStatement uid_sp = null; if (repo == null) { uid_sp = data_repo.getPreparedStatement(user_id, GET_USER_DB_UID_QUERY); } else { uid_sp = repo.getPreparedStatement(user_id, GET_USER_DB_UID_QUERY); } synchronized (uid_sp) { uid_sp.setString(1, user_id.toString()); rs = uid_sp.executeQuery(); if (rs.next()) { result = rs.getLong(1); } else { result = -1; } } } finally { data_repo.release(null, rs); } return result; } /** * <code>getUsers</code> method is thread safe. * * @return a <code>List</code> of user IDs from database. 
* * @throws TigaseDBException */ @Override public List<BareJID> getUsers() throws TigaseDBException { ResultSet rs = null; List<BareJID> users = null; try { PreparedStatement all_users_sp = data_repo.getPreparedStatement(null, get_users_query); synchronized (all_users_sp) { // Load all user ids from database rs = all_users_sp.executeQuery(); users = new ArrayList<BareJID>(1000); while (rs.next()) { users.add(BareJID.bareJIDInstanceNS(rs.getString(1))); } // end of while (rs.next()) } } catch (SQLException e) { throw new TigaseDBException("Problem loading user list from repository", e); } finally { data_repo.release(null, rs); rs = null; } return users; } /** * <code>getUsersCount</code> method is thread safe. It uses local variable * for storing <code>Statement</code>. * * @return a <code>long</code> number of user accounts in database. */ @Override public long getUsersCount() { ResultSet rs = null; try { long users = -1; PreparedStatement users_count_sp = data_repo.getPreparedStatement(null, GET_USERS_COUNT_QUERY); synchronized (users_count_sp) { // Load all user count from database rs = users_count_sp.executeQuery(); if (rs.next()) { users = rs.getLong(1); } // end of while (rs.next()) } return users; } catch (SQLException e) { return -1; // throw new // TigaseDBException("Problem loading user list from repository", e); } finally { data_repo.release(null, rs); rs = null; } } /** * Method description * * * @param domain * * @return */ @Override public long getUsersCount(String domain) { ResultSet rs = null; try { long users = -1; PreparedStatement users_domain_count_st = data_repo.getPreparedStatement(null, COUNT_USERS_FOR_DOMAIN_QUERY); synchronized (users_domain_count_st) { // Load all user count from database users_domain_count_st.setString(1, "%@" + domain); rs = users_domain_count_st.executeQuery(); if (rs.next()) { users = rs.getLong(1); } // end of while (rs.next()) } return users; } catch (SQLException e) { return -1; // throw new // 
TigaseDBException("Problem loading user list from repository", e); } finally { data_repo.release(null, rs); rs = null; } } // ~--- methods -------------------------------------------------------------- /** * Describe <code>initRepository</code> method here. * * @param connection_str * a <code>String</code> value * @param params * * @throws DBInitException */ @Override public void initRepository(final String connection_str, Map<String, String> params) throws DBInitException { try { derby_mode = connection_str.startsWith("jdbc:derby"); data_repo = RepositoryFactory.getDataRepository(null, connection_str, params); checkDBSchema(); if (connection_str.contains("autoCreateUser=true")) { autoCreateUser = true; } // end of if (db_conn.contains()) if (connection_str.contains("cacheRepo=off")) { log.fine("Disabling cache."); cache = Collections.synchronizedMap(new RepoCache(0, -1000)); } else { cache = Collections.synchronizedMap(new RepoCache(10000, 60 * 1000)); } data_repo.initPreparedStatement(GET_USER_DB_UID_QUERY, GET_USER_DB_UID_QUERY); data_repo.initPreparedStatement(GET_USERS_COUNT_QUERY, GET_USERS_COUNT_QUERY); if (connection_str.startsWith("jdbc:postgresql")) { get_users_query = PGSQL_GET_USERS_QUERY; } else { get_users_query = DEF_GET_USERS_QUERY; } data_repo.initPreparedStatement(get_users_query, get_users_query); data_repo.initPreparedStatement(ADD_USER_PLAIN_PW_QUERY, ADD_USER_PLAIN_PW_QUERY); data_repo.initPreparedStatement(REMOVE_USER_QUERY, REMOVE_USER_QUERY); data_repo.initPreparedStatement(ADD_NODE_QUERY, ADD_NODE_QUERY); data_repo.initPreparedStatement(COUNT_USERS_FOR_DOMAIN_QUERY, COUNT_USERS_FOR_DOMAIN_QUERY); data_repo.initPreparedStatement(DATA_FOR_NODE_QUERY, DATA_FOR_NODE_QUERY); data_repo.initPreparedStatement(KEYS_FOR_NODE_QUERY, KEYS_FOR_NODE_QUERY); data_repo.initPreparedStatement(NODES_FOR_NODE_QUERY, NODES_FOR_NODE_QUERY); data_repo.initPreparedStatement(INSERT_KEY_VAL_QUERY, INSERT_KEY_VAL_QUERY); 
data_repo.initPreparedStatement(REMOVE_KEY_DATA_QUERY, REMOVE_KEY_DATA_QUERY); auth = new AuthRepositoryImpl(this); // initRepo(); log.log(Level.INFO, "Initialized database connection: {0}", connection_str); } catch (Exception e) { data_repo = null; throw new DBInitException( "Problem initializing jdbc connection: " + connection_str, e); } } /** * Method description * * * @param user * * @throws TigaseDBException * @throws UserNotFoundException */ @Override public void logout(BareJID user) throws UserNotFoundException, TigaseDBException { auth.logout(user); } /** * Describe <code>otherAuth</code> method here. * * @param props * a <code>Map</code> value * @return a <code>boolean</code> value * @exception UserNotFoundException * if an error occurs * @exception TigaseDBException * if an error occurs * @exception AuthorizationException * if an error occurs */ @Override public boolean otherAuth(final Map<String, Object> props) throws UserNotFoundException, TigaseDBException, AuthorizationException { return auth.otherAuth(props); } // Implementation of tigase.db.AuthRepository /** * Describe <code>plainAuth</code> method here. * * @param user * a <code>String</code> value * @param password * a <code>String</code> value * @return a <code>boolean</code> value * * @throws AuthorizationException * @exception UserNotFoundException * if an error occurs * @exception TigaseDBException * if an error occurs */ @Override @Deprecated public boolean plainAuth(BareJID user, final String password) throws UserNotFoundException, TigaseDBException, AuthorizationException { return auth.plainAuth(user, password); } /** * Method description * * * @param authProps */ @Override public void queryAuth(Map<String, Object> authProps) { auth.queryAuth(authProps); } /** * Describe <code>removeData</code> method here. 
* * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void removeData(BareJID user_id, final String subnode, final String key) throws UserNotFoundException, TigaseDBException { removeData(null, user_id, subnode, key); } private void removeData(DataRepository repo, BareJID user_id, final String subnode, final String key) throws UserNotFoundException, TigaseDBException { // cache.remove(user_id+"/"+subnode+"/"+key); try { long nid = getNodeNID(repo, user_id, subnode); PreparedStatement remove_key_data_st = null; if (repo == null) { remove_key_data_st = data_repo.getPreparedStatement(user_id, REMOVE_KEY_DATA_QUERY); } else { remove_key_data_st = repo.getPreparedStatement(user_id, REMOVE_KEY_DATA_QUERY); } synchronized (remove_key_data_st) { if (nid > 0) { remove_key_data_st.setLong(1, nid); remove_key_data_st.setString(2, key); remove_key_data_st.executeUpdate(); } } } catch (SQLException e) { throw new TigaseDBException("Error getting subnodes list.", e); } } /** * Describe <code>removeData</code> method here. * * @param user_id * a <code>String</code> value * @param key * a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void removeData(BareJID user_id, final String key) throws UserNotFoundException, TigaseDBException { removeData(user_id, null, key); } /** * Describe <code>removeSubnode</code> method here. 
* * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void removeSubnode(BareJID user_id, final String subnode) throws UserNotFoundException, TigaseDBException { if (subnode == null) { return; } // end of if (subnode == null) try { long nid = getNodeNID(null, user_id, subnode); if (nid > 0) { deleteSubnode(null, nid); cache.remove(user_id + "/" + subnode); } } catch (SQLException e) { throw new TigaseDBException("Error getting subnodes list.", e); } } /** * <code>removeUser</code> method is thread safe. It uses local variable for * storing <code>Statement</code>. * * @param user_id * a <code>String</code> value the user Jabber ID. * * @throws TigaseDBException * @exception UserNotFoundException * if an error occurs */ @Override public void removeUser(BareJID user_id) throws UserNotFoundException, TigaseDBException { Statement stmt = null; ResultSet rs = null; String query = null; if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "Removing user: {0}", user_id); } try { stmt = data_repo.createStatement(user_id); // Get user account uid long uid = getUserUID(null, user_id, autoCreateUser); // Remove all user enrties from pairs table query = "delete from " + DEF_PAIRS_TBL + " where uid = " + uid; stmt.executeUpdate(query); // Remove all user entries from nodes table query = "delete from " + DEF_NODES_TBL + " where uid = " + uid; stmt.executeUpdate(query); PreparedStatement user_del_sp = data_repo.getPreparedStatement(user_id, REMOVE_USER_QUERY); // Remove user account from users table synchronized (user_del_sp) { user_del_sp.setString(1, user_id.toString()); user_del_sp.executeUpdate(); } } catch (SQLException e) { throw new TigaseDBException("Error removing user from repository: " + query, e); } finally { data_repo.release(stmt, rs); stmt = null; cache.remove(user_id.toString()); // cache.clear(); } } // ~--- set 
methods ---------------------------------------------------------- /** * Describe <code>setData</code> method here. * * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @param value * a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void setData(BareJID user_id, final String subnode, final String key, final String value) throws UserNotFoundException, TigaseDBException { setDataList(user_id, subnode, key, new String[] { value }); } /** * Describe <code>setData</code> method here. * * @param user_id * a <code>String</code> value * @param key * a <code>String</code> value * @param value * a <code>String</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void setData(BareJID user_id, final String key, final String value) throws UserNotFoundException, TigaseDBException { setData(user_id, null, key, value); } /** * Describe <code>setDataList</code> method here. 
* * @param user_id * a <code>String</code> value * @param subnode * a <code>String</code> value * @param key * a <code>String</code> value * @param list * a <code>String[]</code> value * @exception UserNotFoundException * if an error occurs * @throws TigaseDBException */ @Override public void setDataList(BareJID user_id, final String subnode, final String key, final String[] list) throws UserNotFoundException, TigaseDBException { // Transactions may not yet work properly but at least let's make sure // both calls below are executed exclusively on the same DB connection DataRepository repo = data_repo.takeRepoHandle(user_id); synchronized (repo) { try { removeData(repo, user_id, subnode, key); try { addDataList(repo, user_id, subnode, key, list); } catch (SQLException ex) { throw new TigaseDBException("Problem adding data to DB, user_id: " + user_id + ", subnode: " + subnode + ", key: " + key + ", list: " + Arrays.toString(list), ex); } } finally { data_repo.releaseRepoHandle(repo); } } // int counter = 0; // boolean success = false; // DataRepository repo = data_repo.takeRepoHandle(); // try { // while (!success && ++counter < 4) { // try { // repo.startTransaction(); // removeData(repo, user_id, subnode, key); // addDataList(repo, user_id, subnode, key, list); // repo.commit(); // repo.endTransaction(); // success = true; // } catch (SQLException sqlex) { // try { // repo.rollback(); // repo.endTransaction(); // } catch (SQLException e) { // log.log(Level.WARNING, "Problem rolling-back transaction: ", e); // } // try { // Thread.sleep(10); // } catch (InterruptedException ex) { // } // } // } // } finally { // data_repo.releaseRepoHandle(repo); // } // if (!success) { // log.log(Level.WARNING, // "Unsuccessful dataList set, user_id: " + user_id + ", subnode: " + // subnode // + ", key: " + key + ", list: " + Arrays.toString(list)); // } } /** * Method description * * * @param user * @param password * * @throws TigaseDBException */ @Override public void 
updatePassword(BareJID user, final String password) throws TigaseDBException { auth.updatePassword(user, password); } /** * Method description * * * @param user * * @return */ @Override public boolean userExists(BareJID user) { try { getUserUID(null, user, false); return true; } catch (Exception e) { return false; } } private long addNode(DataRepository repo, long uid, long parent_nid, String node_name) throws SQLException { ResultSet rs = null; PreparedStatement node_add_sp = null; if (repo == null) { node_add_sp = data_repo.getPreparedStatement(null, ADD_NODE_QUERY); } else { node_add_sp = repo.getPreparedStatement(null, ADD_NODE_QUERY); } synchronized (node_add_sp) { try { if (parent_nid < 0) { node_add_sp.setNull(1, Types.BIGINT); } else { node_add_sp.setLong(1, parent_nid); } // end of else node_add_sp.setLong(2, uid); node_add_sp.setString(3, node_name); rs = node_add_sp.executeQuery(); if (rs.next()) { return rs.getLong(1); } else { log.warning("Missing NID after adding new node..."); return -1; // throw new TigaseDBException("Propeblem adding new node. " // + "The SP should return nid or fail"); } // end of if (isnext) else } finally { data_repo.release(null, rs); } } // return new_nid; } /** * <code>addUserRepo</code> method is thread safe. It uses local variable for * storing <code>Statement</code>. * * @param user_id * a <code>String</code> value of the user ID. * @return a <code>long</code> value of <code>uid</code> database user ID. 
* @exception SQLException * if an error occurs */ private long addUserRepo(DataRepository repo, BareJID user_id) throws SQLException { ResultSet rs = null; long uid = -1; PreparedStatement user_add_sp = null; if (repo == null) { user_add_sp = data_repo.getPreparedStatement(user_id, ADD_USER_PLAIN_PW_QUERY); } else { user_add_sp = repo.getPreparedStatement(user_id, ADD_USER_PLAIN_PW_QUERY); } synchronized (user_add_sp) { try { user_add_sp.setString(1, user_id.toString()); user_add_sp.setNull(2, Types.VARCHAR); rs = user_add_sp.executeQuery(); if (rs.next()) { uid = rs.getLong(1); // addNode(uid, -1, root_node); } else { log.warning("Missing UID after adding new user..."); // throw new // TigaseDBException("Propeblem adding new user to repository. " // + "The SP should return uid or fail"); } // end of if (isnext) else } finally { data_repo.release(null, rs); } } cache.put(user_id.toString(), Long.valueOf(uid)); return uid; } private String buildNodeQuery(long uid, String node_path) { String query = "select nid as nid1 from " + DEF_NODES_TBL + " where (uid = " + uid + ")" + " AND (parent_nid is null)" + " AND (node = '" + DEF_ROOT_NODE + "')"; if (node_path == null) { return query; } else { StringTokenizer strtok = new StringTokenizer(node_path, "/", false); int cnt = 1; String subquery = query; while (strtok.hasMoreTokens()) { String token = strtok.nextToken(); ++cnt; subquery = "select nid as nid" + cnt + ", node as node" + cnt + " from " + DEF_NODES_TBL + ", (" + subquery + ") nodes" + (cnt - 1) + " where (parent_nid = nid" + (cnt - 1) + ")" + " AND (node = '" + token + "')"; } // end of while (strtok.hasMoreTokens()) return subquery; } // end of else } // Implementation of tigase.db.UserRepository private void checkDBSchema() throws SQLException { String schema_version = "1.0"; String query = (derby_mode ? 
DERBY_GETSCHEMAVER_QUERY : JDBC_GETSCHEMAVER_QUERY); Statement stmt = data_repo.createStatement(null); ResultSet rs = stmt.executeQuery(query); try { if (rs.next()) { schema_version = rs.getString(1); if (false == CURRENT_DB_SCHEMA_VER.equals(schema_version)) { System.err.println("\n\nPlease upgrade database schema now."); System.err.println("Current scheme version is: " + schema_version + ", expected: " + CURRENT_DB_SCHEMA_VER); System.err.println("Check the schema upgrade guide at the address:"); System.err.println(SCHEMA_UPGRADE_LINK); System.err.println("----"); System.err.println("If you have upgraded your schema and you are still"); System.err.println("experiencing this problem please contact support at"); System.err.println("e-mail address: [email protected]"); // e.printStackTrace(); System.exit(100); } } } finally { data_repo.release(stmt, rs); } // end of try-catch } private long createNodePath(DataRepository repo, BareJID user_id, String node_path) throws SQLException, UserNotFoundException { if (node_path == null) { // Or should I throw NullPointerException? 
// OK return getNodeNID(repo, user_id, null); } // end of if (node_path == null) // OK long uid = getUserUID(repo, user_id, autoCreateUser); // OK long nid = getNodeNID(repo, uid, null); StringTokenizer strtok = new StringTokenizer(node_path, "/", false); StringBuilder built_path = new StringBuilder(); while (strtok.hasMoreTokens()) { String token = strtok.nextToken(); built_path.append("/").append(token); // OK long cur_nid = getNodeNID(repo, uid, built_path.toString()); if (cur_nid > 0) { nid = cur_nid; } else { // OK nid = addNode(repo, uid, nid, token); } // end of if (cur_nid > 0) else } // end of while (strtok.hasMoreTokens()) return nid; } private void deleteSubnode(DataRepository repo, long nid) throws SQLException { Statement stmt = null; ResultSet rs = null; String query = null; try { if (repo == null) { stmt = data_repo.createStatement(null); } else { stmt = repo.createStatement(null); } query = "delete from " + DEF_PAIRS_TBL + " where nid = " + nid; stmt.executeUpdate(query); query = "delete from " + DEF_NODES_TBL + " where nid = " + nid; stmt.executeUpdate(query); } finally { data_repo.release(stmt, rs); } } // ~--- get methods ---------------------------------------------------------- private long getNodeNID(DataRepository repo, long uid, String node_path) throws SQLException, UserNotFoundException { String query = buildNodeQuery(uid, node_path); if (log.isLoggable(Level.FINEST)) { log.finest(query); } Statement stmt = null; ResultSet rs = null; long nid = -1; try { if (repo == null) { stmt = data_repo.createStatement(null); } else { stmt = repo.createStatement(null); } rs = stmt.executeQuery(query); if (rs.next()) { nid = rs.getLong(1); } else { nid = -1; } // end of if (isnext) else if (nid <= 0) { if (node_path == null) { log.info("Missing root node, database upgrade or bug in the code? 
Adding missing " + "root node now."); // OK nid = addNode(repo, uid, -1, "root"); } else { if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "Missing nid for node path: {0} and uid: {1}", new Object[] { node_path, uid }); } } } return nid; } finally { data_repo.release(stmt, rs); stmt = null; rs = null; } } private long getNodeNID(DataRepository repo, BareJID user_id, String node_path) throws SQLException, UserNotFoundException { Long cache_res = (Long) cache.get(user_id + "/" + node_path); if (cache_res != null) { return cache_res.longValue(); } // end of if (result != null) // OK long uid = getUserUID(repo, user_id, autoCreateUser); // OK long result = getNodeNID(repo, uid, node_path); if (result > 0) { cache.put(user_id + "/" + node_path, Long.valueOf(result)); } // end of if (result > 0) return result; } private long getUserUID(DataRepository repo, BareJID user_id, boolean autoCreate) throws SQLException, UserNotFoundException { // OK long result = getUserUID(repo, user_id); if (result <= 0) { if (autoCreate) { // OK result = addUserRepo(repo, user_id); } else { throw new UserNotFoundException("User does not exist: " + user_id); } // end of if (autoCreate) else } // end of if (isnext) else return result; } // ~--- inner classes -------------------------------------------------------- private class RepoCache extends SimpleCache<String, Object> { /** * Constructs ... 
* * * @param maxsize * @param cache_time */ public RepoCache(int maxsize, long cache_time) { super(maxsize, cache_time); } // ~--- methods ------------------------------------------------------------ /** * Method description * * * @param key * * @return */ @Override public Object remove(Object key) { if (cache_off) { return null; } Object val = super.remove(key); String strk = key.toString(); Iterator<String> ks = keySet().iterator(); while (ks.hasNext()) { String k = ks.next().toString(); if (k.startsWith(strk)) { ks.remove(); } // end of if (k.startsWith(strk)) } // end of while (ks.hasNext()) return val; } } } // JDBCRepository // ~ Formatted in Sun Code Convention // ~ Formatted by Jindent --- http://www.jindent.com
Reimplementation of setData method of JDBCRepository to use TigUpdatePairs git-svn-id: 4a0daf30c0bbd291b3bc5fe8f058bf11ee523347@2891 7d282ba1-3ae6-0310-8f9b-c9008a0864d2
src/main/java/tigase/db/jdbc/JDBCRepository.java
Reimplementation of setData method of JDBCRepository to use TigUpdatePairs
Java
lgpl-2.1
0a484d11e7f2754485fa0fe6e4ccaa89a435b670
0
tomazzupan/wildfly,xasx/wildfly,tadamski/wildfly,iweiss/wildfly,rhusar/wildfly,99sono/wildfly,99sono/wildfly,tomazzupan/wildfly,golovnin/wildfly,xasx/wildfly,rhusar/wildfly,jstourac/wildfly,wildfly/wildfly,iweiss/wildfly,rhusar/wildfly,99sono/wildfly,tomazzupan/wildfly,golovnin/wildfly,golovnin/wildfly,tadamski/wildfly,jstourac/wildfly,pferraro/wildfly,pferraro/wildfly,xasx/wildfly,wildfly/wildfly,iweiss/wildfly,pferraro/wildfly,jstourac/wildfly,tadamski/wildfly,jstourac/wildfly,wildfly/wildfly,iweiss/wildfly,pferraro/wildfly,rhusar/wildfly,wildfly/wildfly
/* * JBoss, Home of Professional Open Source. * Copyright 2011, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.connector.subsystems.resourceadapters; import static org.jboss.as.connector.subsystems.resourceadapters.Constants.ARCHIVE; import static org.jboss.as.connector.subsystems.resourceadapters.Constants.MODULE; import static org.jboss.as.connector.subsystems.resourceadapters.Constants.RESOURCEADAPTERS_NAME; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR; import java.util.LinkedList; import java.util.List; import org.jboss.as.connector.logging.ConnectorLogger; import org.jboss.as.connector.util.ConnectorServices; import org.jboss.as.controller.OperationContext; import org.jboss.as.controller.OperationFailedException; import org.jboss.as.controller.OperationStepHandler; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.operations.common.Util; import org.jboss.dmr.ModelNode; import org.jboss.msc.service.ServiceController; 
import org.jboss.msc.service.ServiceName; /** * @author @author <a href="mailto:[email protected]">Stefano * Maestri</a> */ public class RaRemove implements OperationStepHandler { static final RaRemove INSTANCE = new RaRemove(); public void execute(OperationContext context, ModelNode operation) throws OperationFailedException { final ModelNode opAddr = operation.require(OP_ADDR); final String idName = PathAddress.pathAddress(opAddr).getLastElement().getValue(); final boolean isModule; // Compensating is add final ModelNode model = context.readResource(PathAddress.EMPTY_ADDRESS, false).getModel(); final String archiveOrModuleName; if (!model.hasDefined(ARCHIVE.getName()) && !model.hasDefined(MODULE.getName())) { throw ConnectorLogger.ROOT_LOGGER.archiveOrModuleRequired(); } if (model.get(ARCHIVE.getName()).isDefined()) { isModule = false; archiveOrModuleName = model.get(ARCHIVE.getName()).asString(); } else { isModule = true; archiveOrModuleName = model.get(MODULE.getName()).asString(); } final ModelNode compensating = Util.getEmptyOperation(ADD, opAddr); if (model.hasDefined(RESOURCEADAPTERS_NAME)) { for (ModelNode raNode : model.get(RESOURCEADAPTERS_NAME).asList()) { ModelNode raCompensatingNode = raNode.clone(); compensating.get(RESOURCEADAPTERS_NAME).add(raCompensatingNode); } } context.removeResource(PathAddress.EMPTY_ADDRESS); if (context.isDefaultRequiresRuntime()) { context.addStep(new OperationStepHandler() { public void execute(OperationContext context, ModelNode operation) throws OperationFailedException { final boolean wasActive; wasActive = RaOperationUtil.removeIfActive(context, archiveOrModuleName, idName); if (wasActive) { if (!context.isResourceServiceRestartAllowed()) { context.reloadRequired(); context.completeStep(new OperationContext.RollbackHandler() { @Override public void handleRollback(OperationContext context, ModelNode operation) { context.revertReloadRequired(); } }); return; } } ServiceName raServiceName = 
ServiceName.of(ConnectorServices.RA_SERVICE, idName); ServiceController<?> serviceController = context.getServiceRegistry(false).getService(raServiceName); final ModifiableResourceAdapter resourceAdapter; if (serviceController != null) { resourceAdapter = (ModifiableResourceAdapter) serviceController.getValue(); } else { resourceAdapter = null; } final List<ServiceName> serviceNameList = context.getServiceRegistry(false).getServiceNames(); for (ServiceName name : serviceNameList) { if (raServiceName.isParentOf(name)) { context.removeService(name); } } if (model.get(MODULE.getName()).isDefined()) { //ServiceName deploymentServiceName = ConnectorServices.getDeploymentServiceName(model.get(MODULE.getName()).asString(),raId); //context.removeService(deploymentServiceName); ServiceName deployerServiceName = ConnectorServices.RESOURCE_ADAPTER_DEPLOYER_SERVICE_PREFIX.append(idName); context.removeService(deployerServiceName); ServiceName inactiveServiceName = ConnectorServices.INACTIVE_RESOURCE_ADAPTER_SERVICE.append(idName); context.removeService(inactiveServiceName); } context.removeService(raServiceName); context.completeStep(new OperationContext.RollbackHandler() { @Override public void handleRollback(OperationContext context, ModelNode operation) { if (resourceAdapter != null) { List<ServiceController<?>> newControllers = new LinkedList<ServiceController<?>>(); if (model.get(ARCHIVE.getName()).isDefined()) { RaOperationUtil.installRaServices(context, idName, resourceAdapter, newControllers); } else { try { RaOperationUtil.installRaServicesAndDeployFromModule(context, idName, resourceAdapter, archiveOrModuleName, newControllers); } catch (OperationFailedException e) { } } try { if (wasActive) { RaOperationUtil.activate(context, idName, archiveOrModuleName); } } catch (OperationFailedException e) { } } } }); } }, OperationContext.Stage.RUNTIME); } } }
connector/src/main/java/org/jboss/as/connector/subsystems/resourceadapters/RaRemove.java
/* * JBoss, Home of Professional Open Source. * Copyright 2011, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.connector.subsystems.resourceadapters; import static org.jboss.as.connector.subsystems.resourceadapters.Constants.ARCHIVE; import static org.jboss.as.connector.subsystems.resourceadapters.Constants.MODULE; import static org.jboss.as.connector.subsystems.resourceadapters.Constants.RESOURCEADAPTERS_NAME; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR; import java.util.LinkedList; import java.util.List; import org.jboss.as.connector.logging.ConnectorLogger; import org.jboss.as.connector.util.ConnectorServices; import org.jboss.as.controller.OperationContext; import org.jboss.as.controller.OperationFailedException; import org.jboss.as.controller.OperationStepHandler; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.operations.common.Util; import org.jboss.dmr.ModelNode; import org.jboss.msc.service.ServiceController; 
import org.jboss.msc.service.ServiceName; /** * @author @author <a href="mailto:[email protected]">Stefano * Maestri</a> */ public class RaRemove implements OperationStepHandler { static final RaRemove INSTANCE = new RaRemove(); public void execute(OperationContext context, ModelNode operation) throws OperationFailedException { final ModelNode opAddr = operation.require(OP_ADDR); final String idName = PathAddress.pathAddress(opAddr).getLastElement().getValue(); final boolean isModule; // Compensating is add final ModelNode model = context.readResource(PathAddress.EMPTY_ADDRESS, false).getModel(); final String archiveOrModuleName; if (!model.hasDefined(ARCHIVE.getName()) && !model.hasDefined(MODULE.getName())) { throw ConnectorLogger.ROOT_LOGGER.archiveOrModuleRequired(); } if (model.get(ARCHIVE.getName()).isDefined()) { isModule = false; archiveOrModuleName = model.get(ARCHIVE.getName()).asString(); } else { isModule = true; archiveOrModuleName = model.get(MODULE.getName()).asString(); } final ModelNode compensating = Util.getEmptyOperation(ADD, opAddr); if (model.hasDefined(RESOURCEADAPTERS_NAME)) { for (ModelNode raNode : model.get(RESOURCEADAPTERS_NAME).asList()) { ModelNode raCompensatingNode = raNode.clone(); compensating.get(RESOURCEADAPTERS_NAME).add(raCompensatingNode); } } context.removeResource(PathAddress.EMPTY_ADDRESS); context.addStep(new OperationStepHandler() { public void execute(OperationContext context, ModelNode operation) throws OperationFailedException { final boolean wasActive; wasActive = RaOperationUtil.removeIfActive(context, archiveOrModuleName, idName); if (wasActive) { if(!context.isResourceServiceRestartAllowed()) { context.reloadRequired(); context.completeStep(new OperationContext.RollbackHandler() { @Override public void handleRollback(OperationContext context, ModelNode operation) { context.revertReloadRequired(); } }); return; } } ServiceName raServiceName = ServiceName.of(ConnectorServices.RA_SERVICE, idName); ServiceController<?> 
serviceController = context.getServiceRegistry(false).getService(raServiceName); final ModifiableResourceAdapter resourceAdapter; if (serviceController != null) { resourceAdapter = (ModifiableResourceAdapter) serviceController.getValue(); } else { resourceAdapter = null; } final List<ServiceName> serviceNameList = context.getServiceRegistry(false).getServiceNames(); for (ServiceName name : serviceNameList) { if (raServiceName.isParentOf(name)) { context.removeService(name); } } if (model.get(MODULE.getName()).isDefined()) { //ServiceName deploymentServiceName = ConnectorServices.getDeploymentServiceName(model.get(MODULE.getName()).asString(),raId); //context.removeService(deploymentServiceName); ServiceName deployerServiceName = ConnectorServices.RESOURCE_ADAPTER_DEPLOYER_SERVICE_PREFIX.append(idName); context.removeService(deployerServiceName); ServiceName inactiveServiceName = ConnectorServices.INACTIVE_RESOURCE_ADAPTER_SERVICE.append(idName); context.removeService(inactiveServiceName); } context.removeService(raServiceName); context.completeStep(new OperationContext.RollbackHandler() { @Override public void handleRollback(OperationContext context, ModelNode operation) { if (resourceAdapter != null) { List<ServiceController<?>> newControllers = new LinkedList<ServiceController<?>>(); if (model.get(ARCHIVE.getName()).isDefined()) { RaOperationUtil.installRaServices(context, idName, resourceAdapter, newControllers); } else { try { RaOperationUtil.installRaServicesAndDeployFromModule(context, idName, resourceAdapter, archiveOrModuleName, newControllers); } catch (OperationFailedException e) { } } try { if (wasActive){ RaOperationUtil.activate(context, idName, archiveOrModuleName); } } catch (OperationFailedException e) { } } } }); } }, OperationContext.Stage.RUNTIME); } }
[WFLY-9281] RaRemove shouldn't add a runtime step on an HC
connector/src/main/java/org/jboss/as/connector/subsystems/resourceadapters/RaRemove.java
[WFLY-9281] RaRemove shouldn't add a runtime step on an HC
Java
lgpl-2.1
d0ce102abb1f3768ed118ec42ac151ebdd2c9aa2
0
it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor
///////////////////////////////////////////////////////////////////////// // // © University of Southampton IT Innovation Centre, 2014 // // Copyright in this library belongs to the University of Southampton // IT Innovation Centre of Gamma House, Enterprise Road, // Chilworth Science Park, Southampton, SO16 7NS, UK. // // This software may not be used, sold, licensed, transferred, copied // or reproduced in whole or in part in any manner or form or in or // on any media by any person other than in accordance with the terms // of the Licence Agreement supplied with the software, or otherwise // without the prior written consent of the copyright owners. // // This software is distributed WITHOUT ANY WARRANTY, without even the // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR // PURPOSE, except where stated in the Licence Agreement supplied with // the software. // // Created By : Simon Crowle // Maxim Bashevoy // Created Date : 2014-04-02 // Created for Project : EXPERIMEDIA // ///////////////////////////////////////////////////////////////////////// package uk.co.soton.itinnovation.ecc.service.services; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.UUID; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.experiment.Experiment; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.Measurement; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MeasurementSet; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.Metric; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MetricGenerator; import 
uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MetricHelper; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MetricType; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.Report; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMClient; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMDataBatch; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMPhase; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMPostReportSummary; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.provenance.EDMProvReport; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.factory.EDMInterfaceFactory; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.IMonitoringEDM; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.dao.IExperimentDAO; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.dao.IMetricGeneratorDAO; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.dao.IReportDAO; import uk.ac.soton.itinnovation.experimedia.arch.ecc.em.factory.EMInterfaceFactory; import uk.ac.soton.itinnovation.experimedia.arch.ecc.em.spec.workflow.IEMLifecycleListener; import uk.ac.soton.itinnovation.experimedia.arch.ecc.em.spec.workflow.IExperimentMonitor; import uk.co.soton.itinnovation.ecc.service.domain.DatabaseConfiguration; import uk.co.soton.itinnovation.ecc.service.domain.PROVDatabaseConfiguration; import uk.co.soton.itinnovation.ecc.service.domain.RabbitConfiguration; import uk.co.soton.itinnovation.ecc.service.process.ExperimentStateModel; import uk.co.soton.itinnovation.ecc.service.process.LiveMetricScheduler; import uk.co.soton.itinnovation.ecc.service.process.LiveMetricSchedulerListener; import uk.co.soton.itinnovation.ecc.service.process.LivePROVConsumer; /** * ExperimentService provides executive 
control over the ECC and experiment * work-flow. */ @Service("experimentService") public class ExperimentService { private final Logger logger = LoggerFactory.getLogger(getClass()); private final static String DEFAULT_EXPERIMENT_NAME = "EXPERIMEDIA Experiment"; private final static String DEFAULT_EXPERIMENT_DESCRIPTION = "New EXPERIMEDIA experiment"; private IExperimentMonitor expMonitor; private IMonitoringEDM expDataManager; private IMetricGeneratorDAO expMetGeneratorDAO; private IReportDAO expReportAccessor; private ExperimentStateModel expStateModel; private LiveMetricScheduler liveMetricScheduler; // private LivePROVConsumer livePROVConsumer; private boolean started = false; public ExperimentService() { } /** * Initialises the service (empty). */ @PostConstruct public void init() { } /** * Ensures the service is shut down properly. */ @PreDestroy public void shutdown() { logger.debug("Shutting down experiment service"); if (started) { // Metrics sheduling shutdown if (liveMetricScheduler != null) { liveMetricScheduler.shutDown(); liveMetricScheduler = null; } // Experiment monitor shutdown if (expMonitor != null) { expMonitor.shutDown(); expMonitor = null; } // Experiment data manager tidy up expReportAccessor = null; expMetGeneratorDAO = null; expDataManager = null; } logger.debug("Experiment service shut down"); } /** * Starts the service (should only be called by * {@link ConfigurationService}) * * @param databaseConfiguration * @param rabbitConfiguration * @return true if everything worked. 
*/ boolean start(DatabaseConfiguration databaseConfiguration, RabbitConfiguration rabbitConfiguration) { started = false; logger.debug("Starting experiment service"); // Try setting up the metrics data management -------------------------- if (databaseConfiguration == null) { logger.error("Failed to start experiment service: database configuration is NULL"); return false; } else { Properties props = new Properties(); props.put("dbPassword", databaseConfiguration.getUserPassword()); props.put("dbName", databaseConfiguration.getDatabaseName()); props.put("dbType", databaseConfiguration.getDatabaseType()); props.put("dbURL", databaseConfiguration.getUrl()); props.put("dbUsername", databaseConfiguration.getUserName()); try { expDataManager = EDMInterfaceFactory.getMonitoringEDM(props); } catch (Exception e) { logger.error("Failed to get monitoring EDM", e); return false; } if (!expDataManager.isDatabaseSetUpAndAccessible()) { logger.error("Failed to initialise database for experiment service: could not access EDM database"); return false; } else { // Create data accessors try { expMetGeneratorDAO = expDataManager.getMetricGeneratorDAO(); expReportAccessor = expDataManager.getReportDAO(); } catch (Exception e) { logger.error("Failed to create data accessors", e); return false; } logger.info("EDM initialisation completed OK"); // Try initialising the state model ---------------------------- logger.info("Attempting to initialise experiment state"); expStateModel = new ExperimentStateModel(); try { expStateModel.initialise(props); } catch (Exception e) { logger.error("Failed to initialise experiment state", e); return false; } logger.info("State model initialised"); // Try setting up the Experiment monitor ----------------------- logger.info("Attempting to connect to RabbitMQ server"); expMonitor = EMInterfaceFactory.createEM(); expMonitor.addLifecyleListener(new ExpLifecycleListener()); // Configure EM properties if (rabbitConfiguration == null) { logger.error("Failed to 
initialised experiment service: Rabbit configuration is NULL"); return false; } else { props = new Properties(); props.put("Rabbit_Port", rabbitConfiguration.getPort()); props.put("Rabbit_Use_SSL", (rabbitConfiguration.isUseSsl() ? "true" : "false")); props.put("Rabbit_IP", rabbitConfiguration.getIp()); props.put("Monitor_ID", rabbitConfiguration.getMonitorId().toString()); props.put("Rabbit_Password", rabbitConfiguration.getUserPassword()); props.put("Rabbit_Username", rabbitConfiguration.getUserName()); try { // Try opening the RabbitMQ entry point for this service expMonitor.openEntryPoint(props); } catch (Exception e) { logger.error("Failed to the RabbitMQ entry point", e); return false; } logger.info("EM initialisation completed OK"); liveMetricScheduler = new LiveMetricScheduler(new LiveMetricsScheduleListener()); started = true; return true; } } } } public boolean isStarted() { return started; } public boolean isExperimentInProgress() { boolean result = false; if (started) { result = expStateModel.isExperimentActive(); } return result; } public Experiment reStartExperiment(String uuid) { // Safety first if (!started) { throw new IllegalStateException("Cannot restart experiment: service not initialised"); } Experiment newExp = getExperiment(uuid); try { // Go straight into live monitoring expMonitor.startLifecycle(newExp, EMPhase.eEMLiveMonitoring); // All persistence & process is OK, so make experiment active expStateModel.setActiveExperiment(newExp); // If we have noted any previously connected clients, get their // identities and try re-connecting them Map<UUID, String> clientInfo = expStateModel.getConnectedClientInfo(); if (!clientInfo.isEmpty()) { logger.debug("Reconnecting previously connected clients:"); Iterator<UUID> it = clientInfo.keySet().iterator(); UUID id; String info; while (it.hasNext()) { id = it.next(); info = clientInfo.get(id); logger.debug("[" + id.toString() + "] " + info); } // Need to reconnect previous clients, if any still exist // 
expMonitor.tryReRegisterClients(clientInfo); } return newExp; } catch (Exception e) { logger.error("Failed to restart experiment [" + uuid + "]"); return null; } } /** * Use this method to try to start a new experiment. If the input parameters * are null or there is already an active experiment, this method will * throw. Under normal conditions, this method create a new experiment in * the database and invite ECC clients already known to the service to join * the new experiment. * * @param projName - Name of the project associated with the experiment * @param expName - Name of this specific experiment * @param expDesc - Short description of the experiment * @return - Returns meta-data about the experiment, null if failed to * create. */ public Experiment startExperiment(String projName, String expName, String expDesc) { // Safety first if (!started) { throw new IllegalStateException("Cannot start experiment: service not initialised"); } if (projName == null || projName.isEmpty()) { throw new IllegalArgumentException("Cannot start experiment: project name is NULL or empty"); } if (expName == null || expName.isEmpty()) { expName = DEFAULT_EXPERIMENT_NAME; } if (expDesc == null || expDesc.isEmpty()) { expDesc = DEFAULT_EXPERIMENT_DESCRIPTION; } if (expStateModel.isExperimentActive()) { // TODO: force restart instead logger.warn("Stopping current experiment"); boolean currentExperimentStopped = stopExperiment(); logger.debug("Current experiment stopped: " + currentExperimentStopped); } // Create new experiment instance Experiment newExp = new Experiment(); newExp.setExperimentID(projName); newExp.setName(expName); newExp.setDescription(expDesc); newExp.setStartTime(new Date()); logger.debug("Creating new experiment [" + newExp.getExperimentID() + "] '" + newExp.getName() + "' (" + newExp.getDescription() + ") started at: " + newExp.getStartTime().toString()); try { // Prepare metrics database IExperimentDAO expDAO = expDataManager.getExperimentDAO(); 
expDAO.saveExperiment(newExp); // Try initialising the access to the PROVenance data store for experiment // TO DO: get the PROV configuration during start up // PROVDatabaseConfiguration pdc = new PROVDatabaseConfiguration(); // livePROVConsumer = new LivePROVConsumer(); // // livePROVConsumer.createExperimentRepository(newExp.getUUID(), // newExp.getName(), // pdc.getPROVRepoProperties()); // Go straight into live monitoring expMonitor.startLifecycle(newExp, EMPhase.eEMLiveMonitoring); // All persistence & process is OK, so make experiment active expStateModel.setActiveExperiment(newExp); // If we have noted any previously connected clients, get their // identities and try re-connecting them Map<UUID, String> clientInfo = expStateModel.getConnectedClientInfo(); if (!clientInfo.isEmpty()) { logger.debug("Reconnecting previously connected clients:"); Iterator<UUID> it = clientInfo.keySet().iterator(); UUID id; String info; while (it.hasNext()) { id = it.next(); info = clientInfo.get(id); logger.debug("[" + id.toString() + "] " + info); } // Need to reconnect previous clients, if any still exist expMonitor.tryReRegisterClients(clientInfo); } return newExp; } catch (Exception ex) { String problem = "Could not start experiment because: " + ex.getMessage(); logger.error(problem); return null; } } /** * Use this method to try to stop an experiment. The service will attempt to * save the finish time of the experiment to the database before then * issuing 'stop' messages to attached clients, where appropriate. * * @return true if current experiment was stopped successfully. 
*/
    public boolean stopExperiment() {
        // Guard: the service must have been initialised before any experiment control call
        if (!started) {
            logger.error("Cannot stop experiment: service not initialised");
            return false;
        }

        Experiment exp = expStateModel.getActiveExperiment();

        if (exp != null) {
            try {
                // Finish up the experiment lifecycle
                expMonitor.endLifecycle();

                // Finish up the experiment on the database
                exp.setEndTime(new Date());

                IExperimentDAO expDAO = expDataManager.getExperimentDAO();
                expDAO.finaliseExperiment(exp);

                // Tidy up PROV
                // livePROVConsumer.closeCurrentExperimentRepository();

                // Set no experiment active
                expStateModel.setActiveExperiment(null);

                return true;
            } catch (Exception ex) {
                logger.error("Failed to stop current experiment", ex);
                return false;
            }
        } else {
            logger.error("Could not stop experiment: no experiment currently active");
            return false;
        }
    }

    /**
     * Returns experiment meta-data if there is an experiment currently active.
     *
     * @return - Returns NULL if no experiment is currently active.
     */
    public Experiment getActiveExperiment() {
        Experiment activeExp = null;

        if (started) {
            activeExp = expStateModel.getActiveExperiment();
        }

        return activeExp;
    }

    /**
     * Looks up an experiment (with full detail) by its UUID string.
     *
     * @param uuid - String form of the experiment UUID.
     * @return - The experiment, or NULL if the ID is malformed or the lookup fails.
     */
    public Experiment getExperiment(String uuid) {
        try {
            // UUID.fromString throws on a malformed ID; caught below and logged
            return expDataManager.getExperimentDAO().getExperiment(UUID.fromString(uuid), true);
        } catch (Exception e) {
            logger.error("Failed to return experiment [" + uuid + "]", e);
            return null;
        }
    }

    /**
     * Returns the current phase of the active experiment. If there is no active
     * experiment, EMPhase.eEMUnknownPhase is returned.
     *
     * @return - Current phase, or EMPhase.eEMUnknownPhase when no experiment is active.
     */
    public EMPhase getActiveExperimentPhase() {
        EMPhase currentPhase = EMPhase.eEMUnknownPhase;

        if (started && expStateModel.isExperimentActive()) {
            currentPhase = expStateModel.getCurrentPhase();
        }

        return currentPhase;
    }

    /**
     * Use this method to attempt to advance the current phase of the active
     * experiment.
     *
     * @throws Exception - throws if there is no active experiment or there are
     * no more phases to move on to.
*/
    public void advanceExperimentPhase() throws Exception {
        // Safety first
        if (!started) {
            throw new Exception("Could not advance experiment phase: service not initialised");
        }
        if (!expStateModel.isExperimentActive()) {
            throw new Exception("Could not advance experiment phase: no active experiment");
        }

        try {
            expMonitor.goToNextPhase();
        } catch (Exception ex) {
            // Log before re-wrapping so the failure is recorded even if the
            // caller swallows the exception
            String problem = "Could not advance experiment phase: " + ex.getMessage();
            logger.error(problem);

            throw new Exception(problem, ex);
        }
    }

    /**
     * Use this method to retrieve the currently known connected clients. This
     * call will return an empty set when there are no clients or no active
     * experiment. IMPORTANT: the state of each client you find in this set will
     * be correct only at the point of calling.
     *
     * @return - Set of clients currently connected.
     */
    public Set<EMClient> getCurrentlyConnectedClients() {
        HashSet<EMClient> actuallyConnectedClients = new HashSet<EMClient>();

        if (started) {
            // Get all the clients that the monitor expects to be connected
            Set<EMClient> clients = expMonitor.getAllConnectedClients();

            // Only return those that are not re-registering
            for (EMClient client : clients) {
                if (!client.isReRegistering()) {
                    actuallyConnectedClients.add(client);
                }
            }
        }

        return actuallyConnectedClients;
    }

    /**
     * Use this method to get an instance of a client specified by an ID.
     *
     * @param id - UUID of the client required.
     * @return - Client and its state at the point of calling
     * @throws Exception - throws if the client ID is invalid or service not
     * ready
     */
    public EMClient getClientByID(UUID id) throws Exception {
        if (!started) {
            throw new Exception("Cannot get client - service not initialised");
        }
        if (id == null) {
            throw new Exception("Cannot get client - ID is null");
        }

        return expMonitor.getClientByID(id);
    }

    /**
     * Use this call to send a 'deregister' message to a connected client.
This
     * instruction informs the client that they should send a disconnection
     * message to the ECC and then disconnect from the Rabbit service.
     *
     * @param client - Client to send the deregister message.
     * @throws Exception - throws if the client is not known or is already
     * disconnected from the ECC
     */
    public void deregisterClient(EMClient client) throws Exception {
        // Guard: service must be running and the client reference valid
        if (!started) {
            throw new Exception("Cannot deregister client: service not initialised");
        }
        if (client == null) {
            throw new Exception("Could not deregister client: client is null");
        }

        try {
            expMonitor.deregisterClient(client, "ECC service has requested de-registration");
        } catch (Exception ex) {
            // Log and re-wrap so the caller gets context about which client failed
            String problem = "Had problems deregistering client " + client.getName() + ": " + ex.getMessage();
            logger.error(problem);

            throw new Exception(problem, ex);
        }
    }

    /**
     * Use this method to forcibly remove a client from the service. Note that
     * the experiment service cannot actually force a client's process to close
     * or disconnect from the RabbitMQ server. This action will only clean up
     * references to the client on the service side.
* * @param client * @throws Exception */ public void forceClientDisconnect(EMClient client) throws Exception { // Safety first if (!started) { throw new Exception("Cannot force client disconnection: service not initialised"); } if (client == null) { throw new Exception("Cannot forcibly disconnect client: client is null"); } try { logger.info("Trying to forcibly remove client " + client.getName() + " from service"); expMonitor.forceClientDisconnection(client); liveMetricScheduler.removeClient(client); } catch (Exception ex) { logger.error("Could not forcibly remove client " + client.getName() + " because: " + ex.getMessage()); } } // Private methods --------------------------------------------------------- private void processLiveMetricData(Report report) throws Exception { // Safety first if (!started) { throw new Exception("Cannot process live metric data: experiment service not initialised"); } if (!expStateModel.isExperimentActive()) { throw new Exception("Cannot process live metric data: no currently active experiment"); } if (report == null) { throw new Exception("Live monitoring metric: report is null"); } // Check to see if we have anything useful store, and try store if (sanitiseMetricReport(report)) { // First get the EDM to save the measurements try { expReportAccessor.saveMeasurements(report); } catch (Exception ex) { throw ex; } } } private boolean sanitiseMetricReport(Report reportOUT) { // Check that we apparently have data Integer nom = reportOUT.getNumberOfMeasurements(); if (nom == null || nom == 0) { logger.warn("Did not process metric report: measurement count = 0"); return false; } // Make sure we have a valid measurement set MeasurementSet clientMS = reportOUT.getMeasurementSet(); if (clientMS == null) { logger.warn("Did not process metric report: Measurement set is null"); return false; } Metric metric = clientMS.getMetric(); if (metric == null) { logger.warn("Did not process metric report: Metric is null"); return false; } MetricType mt = 
metric.getMetricType(); // Sanitise data based on full semantic info MeasurementSet cleanSet = new MeasurementSet(clientMS, false); // Run through each measurement checking that it is sane for (Measurement m : clientMS.getMeasurements()) { String val = m.getValue(); switch (mt) { case NOMINAL: case ORDINAL: if (val != null && !val.isEmpty()) { cleanSet.addMeasurement(m); } break; case INTERVAL: case RATIO: { if (val != null) { try { // Make sure we have a sensible number Double dVal = Double.parseDouble(val); if (!dVal.isNaN() && !dVal.isInfinite()) { cleanSet.addMeasurement(m); } } catch (Exception ex) { logger.warn("Caught NaN value in measurement: dropping"); } } } break; } } // Use update report with clean measurement set reportOUT.setMeasurementSet(cleanSet); reportOUT.setNumberOfMeasurements(cleanSet.getMeasurements().size()); return true; } private void processLivePROVData(EDMProvReport report) throws Exception { // if (livePROVConsumer == null) { // throw new Exception("Could not process PROV report: PROV consumer is null"); // } // // if (report == null) { // throw new Exception("Could not process PROV report: report is null"); // } // // try { // livePROVConsumer.addPROVReport(report); // } catch (Exception ex) { // String msg = "Could not store PROV report: " + ex.getMessage(); // logger.error(msg); // // throw new Exception(msg); // } } // Private classes --------------------------------------------------------- private class ExpLifecycleListener implements IEMLifecycleListener { public ExpLifecycleListener() { } // IEMLifecycleListener ------------------------------------------------ @Override public void onClientConnected(EMClient client, boolean reconnected) { logger.info("Client connected: " + client.getName() + (reconnected ? 
"[reconnection]" : ".")); // If the client is re-registering, do not mark them as connected // just yet; they need to respond in Discovery phase before we know // they are really there if (!client.isReRegistering()) { expStateModel.setClientConnectedState(client, true); } } @Override public void onClientDisconnected(UUID clientID) { EMClient client = expMonitor.getClientByID(clientID); if (client != null) { expStateModel.setClientConnectedState(client, false); logger.info("Client " + client.getName() + " disconnected"); // Stop scheduling metrics from this client if (liveMetricScheduler != null) { try { liveMetricScheduler.removeClient(client); } catch (Exception ex) { logger.warn("Client disconencted; metric scheduler says: " + ex.getMessage()); } } } else { logger.warn("Got a disconnection message from an unknown client"); } } @Override public void onClientStartedPhase(EMClient client, EMPhase phase) { logger.info("Client " + client.getName() + " started phase " + phase.name()); } @Override public void onLifecyclePhaseStarted(EMPhase phase) { logger.info("Experiment lifecycle phase " + phase.name() + " started"); expStateModel.setCurrentPhase(phase); // Perform starting actions, as required switch (phase) { case eEMLiveMonitoring: { liveMetricScheduler.start(expMonitor); } break; case eEMPostMonitoringReport: { liveMetricScheduler.stop(); } break; } } @Override public void onLifecyclePhaseCompleted(EMPhase phase) { logger.info("Experiment lifecycle phase " + phase.name() + " completed"); } @Override public void onNoFurtherLifecyclePhases() { logger.info("No further experiment lifecycle phases"); expStateModel.setCurrentPhase(EMPhase.eEMProtocolComplete); } @Override public void onLifecycleEnded() { logger.info("Experiment lifecycle has ended"); liveMetricScheduler.stop(); liveMetricScheduler.reset(); try { expMonitor.resetLifecycle(); } catch (Exception ex) { logger.error("Could not reset experiment lifecycle: " + ex.getMessage()); } } @Override public void 
onFoundClientWithMetricGenerators(EMClient client, Set<MetricGenerator> newGens) { if (client != null && newGens != null) { if (client.isReRegistering()) { logger.info("Known client connected: " + client.getID() + " (\"" + client.getName() + "\")"); } // Pass on metric generators to the EDM for storage UUID expID = expStateModel.getActiveExperiment().getUUID(); for (MetricGenerator mg : newGens) { // Check metric generator has at least one entity if (!MetricHelper.getAllEntities(mg).isEmpty()) { try { expMetGeneratorDAO.saveMetricGenerator(mg, expID); } catch (Exception ex) { logger.error("Failed to save metric generators for client " + client.getName() + ": " + ex.getMessage()); } } } } else { logger.error("Received invalid metric generator event"); } } @Override public void onClientEnabledMetricCollection(EMClient client, UUID entityID, boolean enabled) { if (client != null && entityID != null) { String msg = "Client " + client + " has " + (enabled ? "enabled" : "disabled") + " metric collection for Entity ID: " + entityID.toString(); logger.info(msg); } else { logger.error("Received invalid metric collection enabling message"); } } @Override public void onClientSetupResult(EMClient client, boolean success) { if (client != null) { logger.info("Client " + client.getName() + " has completed set up"); } } @Override public void onClientDeclaredCanPush(EMClient client) { if (client != null) { logger.info("Client " + client.getName() + " can push"); } } @Override public void onClientDeclaredCanBePulled(EMClient client) { if (client != null) { if (expStateModel.getCurrentPhase().equals(EMPhase.eEMLiveMonitoring)) { try { liveMetricScheduler.addClient(client); } catch (Exception ex) { logger.error("Could not add pulling client to live monitoring: " + ex.getMessage()); } } else { logger.warn("Client " + client.getName() + " trying to start pull process whilst not in Live monitoring"); } } else { logger.warn("Got pull semantics from unknown client"); } } @Override public 
void onGotMetricData(EMClient client, Report report) {
            if (client != null && report != null) {
                try {
                    processLiveMetricData(report);
                } catch (Exception ex) {
                    String problem = "Could not save measurements for client: "
                            + client.getName() + " because: " + ex.getMessage();
                    logger.error(problem);
                }
            }
        }

        @Override
        public void onGotPROVData(EMClient client, EDMProvReport report) {
            if (report != null) {
                try {
                    processLivePROVData(report);
                } catch (Exception ex) {
                    // FIX: client is not null-checked by the guard above, so
                    // protect the log message against a NullPointerException
                    String clientName = (client != null) ? client.getName() : "[unknown client]";
                    String problem = "Could not save provenance statement for client "
                            + clientName + " because: " + ex.getMessage();
                    logger.error(problem);
                }
            }
        }

        @Override
        public void onGotSummaryReport(EMClient client, EMPostReportSummary summary) {
            if (client != null && summary != null) {
                try {
                    expMonitor.getAllDataBatches(client);
                    logger.info("Requested missing metric data from " + client.getName());
                } catch (Exception ex) {
                    String problem = "Could not request missing metric data from " + client
                            + " because: " + ex.getMessage();
                    logger.error(problem);
                }
            } else {
                // FIX: this branch is also reached when client itself is null, so
                // calling client.getName() unconditionally risked a NullPointerException
                String clientName = (client != null) ? client.getName() : "[unknown client]";
                logger.error("Client " + clientName + " provided an empty summary report");
            }
        }

        @Override
        public void onGotDataBatch(EMClient client, EMDataBatch batch) {
            if (client != null && batch != null) {
                try {
                    expReportAccessor.saveReport(batch.getBatchReport(), true);
                } catch (Exception e) {
                    logger.error("Could not save batch data report: " + e.getMessage());
                }
            }
        }

        @Override
        public void onDataBatchMeasurementSetCompleted(EMClient client, UUID measurementSetID) {
            if (client != null && measurementSetID != null) {
                logger.info("Client " + client.getName()
                        + " finished batching for measurement set: " + measurementSetID.toString());
            }
        }

        @Override
        public void onAllDataBatchesRequestComplete(EMClient client) {
            if (client != null) {
                logger.info("Client " + client.getName() + " has finished batching missing data");
            }
        }

        @Override
        public void onClientTearDownResult(EMClient client, boolean success) {
            if (client != null) {
                logger.info("Client " + client.getName() + " has finished
tearing down");
            }
        }
    }

    /**
     * Listens to live-metric pull scheduling events; used purely for
     * debug-level logging of pull activity.
     */
    private class LiveMetricsScheduleListener implements LiveMetricSchedulerListener {

        public LiveMetricsScheduleListener() {
        }

        // LiveMetricSchedulerListener -----------------------------------------

        @Override
        public void onIssuedClientMetricPull(EMClient client) {
            if (client != null) {
                logger.debug("Issued metric pull on client: " + client.getName());
            }
        }

        @Override
        public void onPullMetricFailed(EMClient client, String reason) {
            if (client != null) {
                // Substitute a placeholder when no failure reason was supplied
                if (reason == null) {
                    reason = "Unknown reason";
                }

                // NOTE(review): the literal reads "Did not pull client" with no
                // trailing space, so the output runs the name together with the
                // prefix — likely a missing space; confirm before changing
                logger.debug("Did not pull client" + client.getName() + ": " + reason);
            }
        }
    }
eccService/src/main/java/uk/co/soton/itinnovation/ecc/service/services/ExperimentService.java
///////////////////////////////////////////////////////////////////////// // // © University of Southampton IT Innovation Centre, 2014 // // Copyright in this library belongs to the University of Southampton // IT Innovation Centre of Gamma House, Enterprise Road, // Chilworth Science Park, Southampton, SO16 7NS, UK. // // This software may not be used, sold, licensed, transferred, copied // or reproduced in whole or in part in any manner or form or in or // on any media by any person other than in accordance with the terms // of the Licence Agreement supplied with the software, or otherwise // without the prior written consent of the copyright owners. // // This software is distributed WITHOUT ANY WARRANTY, without even the // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR // PURPOSE, except where stated in the Licence Agreement supplied with // the software. // // Created By : Simon Crowle // Maxim Bashevoy // Created Date : 2014-04-02 // Created for Project : EXPERIMEDIA // ///////////////////////////////////////////////////////////////////////// package uk.co.soton.itinnovation.ecc.service.services; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.UUID; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.experiment.Experiment; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.Measurement; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MeasurementSet; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.Metric; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MetricGenerator; import 
uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MetricHelper; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.MetricType; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.Report; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMClient; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMDataBatch; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMPhase; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.EMPostReportSummary; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.provenance.EDMProvReport; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.factory.EDMInterfaceFactory; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.IMonitoringEDM; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.dao.IExperimentDAO; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.dao.IMetricGeneratorDAO; import uk.ac.soton.itinnovation.experimedia.arch.ecc.edm.spec.metrics.dao.IReportDAO; import uk.ac.soton.itinnovation.experimedia.arch.ecc.em.factory.EMInterfaceFactory; import uk.ac.soton.itinnovation.experimedia.arch.ecc.em.spec.workflow.IEMLifecycleListener; import uk.ac.soton.itinnovation.experimedia.arch.ecc.em.spec.workflow.IExperimentMonitor; import uk.co.soton.itinnovation.ecc.service.domain.DatabaseConfiguration; import uk.co.soton.itinnovation.ecc.service.domain.PROVDatabaseConfiguration; import uk.co.soton.itinnovation.ecc.service.domain.RabbitConfiguration; import uk.co.soton.itinnovation.ecc.service.process.ExperimentStateModel; import uk.co.soton.itinnovation.ecc.service.process.LiveMetricScheduler; import uk.co.soton.itinnovation.ecc.service.process.LiveMetricSchedulerListener; import uk.co.soton.itinnovation.ecc.service.process.LivePROVConsumer; /** * ExperimentService provides executive 
control over the ECC and experiment * work-flow. */ @Service("experimentService") public class ExperimentService { private final Logger logger = LoggerFactory.getLogger(getClass()); private final static String DEFAULT_EXPERIMENT_NAME = "EXPERIMEDIA Experiment"; private final static String DEFAULT_EXPERIMENT_DESCRIPTION = "New EXPERIMEDIA experiment"; private IExperimentMonitor expMonitor; private IMonitoringEDM expDataManager; private IMetricGeneratorDAO expMetGeneratorDAO; private IReportDAO expReportAccessor; private ExperimentStateModel expStateModel; private LiveMetricScheduler liveMetricScheduler; // private LivePROVConsumer livePROVConsumer; private boolean started = false; public ExperimentService() { } /** * Initialises the service (empty). */ @PostConstruct public void init() { } /** * Ensures the service is shut down properly. */ @PreDestroy public void shutdown() { logger.debug("Shutting down experiment service"); if (started) { // Metrics sheduling shutdown if (liveMetricScheduler != null) { liveMetricScheduler.shutDown(); liveMetricScheduler = null; } // Experiment monitor shutdown if (expMonitor != null) { expMonitor.shutDown(); expMonitor = null; } // Experiment data manager tidy up expReportAccessor = null; expMetGeneratorDAO = null; expDataManager = null; } logger.debug("Experiment service shut down"); } /** * Starts the service (should only be called by * {@link ConfigurationService}) * * @param databaseConfiguration * @param rabbitConfiguration * @return true if everything worked. 
*/ boolean start(DatabaseConfiguration databaseConfiguration, RabbitConfiguration rabbitConfiguration) { started = false; logger.debug("Starting experiment service"); // Try setting up the metrics data management -------------------------- if (databaseConfiguration == null) { logger.error("Failed to start experiment service: database configuration is NULL"); return false; } else { Properties props = new Properties(); props.put("dbPassword", databaseConfiguration.getUserPassword()); props.put("dbName", databaseConfiguration.getDatabaseName()); props.put("dbType", databaseConfiguration.getDatabaseType()); props.put("dbURL", databaseConfiguration.getUrl()); props.put("dbUsername", databaseConfiguration.getUserName()); try { expDataManager = EDMInterfaceFactory.getMonitoringEDM(props); } catch (Exception e) { logger.error("Failed to get monitoring EDM", e); return false; } if (!expDataManager.isDatabaseSetUpAndAccessible()) { logger.error("Failed to initialise database for experiment service: could not access EDM database"); return false; } else { // Create data accessors try { expMetGeneratorDAO = expDataManager.getMetricGeneratorDAO(); expReportAccessor = expDataManager.getReportDAO(); } catch (Exception e) { logger.error("Failed to create data accessors", e); return false; } logger.info("EDM initialisation completed OK"); // Try initialising the state model ---------------------------- logger.info("Attempting to initialise experiment state"); expStateModel = new ExperimentStateModel(); try { expStateModel.initialise(props); } catch (Exception e) { logger.error("Failed to initialise experiment state", e); return false; } logger.info("State model initialised"); // Try setting up the Experiment monitor ----------------------- logger.info("Attempting to connect to RabbitMQ server"); expMonitor = EMInterfaceFactory.createEM(); expMonitor.addLifecyleListener(new ExpLifecycleListener()); // Configure EM properties if (rabbitConfiguration == null) { logger.error("Failed to 
initialised experiment service: Rabbit configuration is NULL"); return false; } else { props = new Properties(); props.put("Rabbit_Port", rabbitConfiguration.getPort()); props.put("Rabbit_Use_SSL", (rabbitConfiguration.isUseSsl() ? "true" : "false")); props.put("Rabbit_IP", rabbitConfiguration.getIp()); props.put("Monitor_ID", rabbitConfiguration.getMonitorId().toString()); props.put("Rabbit_Password", rabbitConfiguration.getUserPassword()); props.put("Rabbit_Username", rabbitConfiguration.getUserName()); try { // Try opening the RabbitMQ entry point for this service expMonitor.openEntryPoint(props); } catch (Exception e) { logger.error("Failed to the RabbitMQ entry point", e); return false; } logger.info("EM initialisation completed OK"); liveMetricScheduler = new LiveMetricScheduler(new LiveMetricsScheduleListener()); started = true; return true; } } } } public boolean isStarted() { return started; } public boolean isExperimentInProgress() { boolean result = false; if (started) { result = expStateModel.isExperimentActive(); } return result; } public Experiment reStartExperiment(String uuid) { // Safety first if (!started) { throw new IllegalStateException("Cannot restart experiment: service not initialised"); } Experiment newExp = getExperiment(uuid); try { // Go straight into live monitoring expMonitor.startLifecycle(newExp, EMPhase.eEMLiveMonitoring); // All persistence & process is OK, so make experiment active expStateModel.setActiveExperiment(newExp); // If we have noted any previously connected clients, get their // identities and try re-connecting them Map<UUID, String> clientInfo = expStateModel.getConnectedClientInfo(); if (!clientInfo.isEmpty()) { logger.debug("Reconnecting previously connected clients:"); Iterator<UUID> it = clientInfo.keySet().iterator(); UUID id; String info; while (it.hasNext()) { id = it.next(); info = clientInfo.get(id); logger.debug("[" + id.toString() + "] " + info); } // Need to reconnect previous clients, if any still exist // 
expMonitor.tryReRegisterClients(clientInfo); } return newExp; } catch (Exception e) { logger.error("Failed to restart experiment [" + uuid + "]"); return null; } } /** * Use this method to try to start a new experiment. If the input parameters * are null or there is already an active experiment, this method will * throw. Under normal conditions, this method create a new experiment in * the database and invite ECC clients already known to the service to join * the new experiment. * * @param projName - Name of the project associated with the experiment * @param expName - Name of this specific experiment * @param expDesc - Short description of the experiment * @return - Returns meta-data about the experiment, null if failed to * create. */ public Experiment startExperiment(String projName, String expName, String expDesc) { // Safety first if (!started) { throw new IllegalStateException("Cannot start experiment: service not initialised"); } if (projName == null || projName.isEmpty()) { throw new IllegalArgumentException("Cannot start experiment: project name is NULL or empty"); } if (expName == null || expName.isEmpty()) { expName = DEFAULT_EXPERIMENT_NAME; } if (expDesc == null || expDesc.isEmpty()) { expDesc = DEFAULT_EXPERIMENT_DESCRIPTION; } if (expStateModel.isExperimentActive()) { // TODO: force restart instead logger.warn("Stopping current experiment"); boolean currentExperimentStopped = stopExperiment(); logger.debug("Current experiment stopped: " + currentExperimentStopped); } // Create new experiment instance Experiment newExp = new Experiment(); newExp.setExperimentID(projName); newExp.setName(expName); newExp.setDescription(expDesc); newExp.setStartTime(new Date()); logger.debug("Creating new experiment [" + newExp.getExperimentID() + "] '" + newExp.getName() + "' (" + newExp.getDescription() + ") started at: " + newExp.getStartTime().toString()); try { // Prepare metrics database IExperimentDAO expDAO = expDataManager.getExperimentDAO(); 
expDAO.saveExperiment(newExp); // Try initialising the access to the PROVenance data store for experiment // TO DO: get the PROV configuration during start up // PROVDatabaseConfiguration pdc = new PROVDatabaseConfiguration(); // livePROVConsumer = new LivePROVConsumer(); // // livePROVConsumer.createExperimentRepository(newExp.getUUID(), // newExp.getName(), // pdc.getPROVRepoProperties()); // Go straight into live monitoring expMonitor.startLifecycle(newExp, EMPhase.eEMLiveMonitoring); // All persistence & process is OK, so make experiment active expStateModel.setActiveExperiment(newExp); // If we have noted any previously connected clients, get their // identities and try re-connecting them Map<UUID, String> clientInfo = expStateModel.getConnectedClientInfo(); if (!clientInfo.isEmpty()) { logger.debug("Reconnecting previously connected clients:"); Iterator<UUID> it = clientInfo.keySet().iterator(); UUID id; String info; while (it.hasNext()) { id = it.next(); info = clientInfo.get(id); logger.debug("[" + id.toString() + "] " + info); } // Need to reconnect previous clients, if any still exist // expMonitor.tryReRegisterClients(clientInfo); } return newExp; } catch (Exception ex) { String problem = "Could not start experiment because: " + ex.getMessage(); logger.error(problem); return null; } } /** * Use this method to try to stop an experiment. The service will attempt to * save the finish time of the experiment to the database before then * issuing 'stop' messages to attached clients, where appropriate. * * @return true if current experiment was stopped successfully. 
*/ public boolean stopExperiment() { if (!started) { logger.error("Cannot stop experiment: service not initialised"); return false; } Experiment exp = expStateModel.getActiveExperiment(); if (exp != null) { try { // Finish up the experiment lifecycle expMonitor.endLifecycle(); // Finish up the experiment on the database exp.setEndTime(new Date()); IExperimentDAO expDAO = expDataManager.getExperimentDAO(); expDAO.finaliseExperiment(exp); // Tidy up PROV // livePROVConsumer.closeCurrentExperimentRepository(); // Set no experiment active expStateModel.setActiveExperiment(null); return true; } catch (Exception ex) { logger.error("Failed to stop current experiment", ex); return false; } } else { logger.error("Could not stop experiment: no experiment currently active"); return false; } } /** * Returns experiment meta-data if there is an experiment currently active. * * @return - Returns NULL if no experiment is currently active. */ public Experiment getActiveExperiment() { Experiment activeExp = null; if (started) { activeExp = expStateModel.getActiveExperiment(); } return activeExp; } public Experiment getExperiment(String uuid) { try { return expDataManager.getExperimentDAO().getExperiment(UUID.fromString(uuid), true); } catch (Exception e) { logger.error("Failed to return experiment [" + uuid + "]", e); return null; } } /** * Returns the current phase of the active experiment. If there is no active * experiment, EMPhase.eEMUnknownPhase is returned. * * @return */ public EMPhase getActiveExperimentPhase() { EMPhase currentPhase = EMPhase.eEMUnknownPhase; if (started && expStateModel.isExperimentActive()) { currentPhase = expStateModel.getCurrentPhase(); } return currentPhase; } /** * Use this method to attempt to advance the current phase of the active * experiment. * * @throws Exception - throws if there is no active experiment or there are * no more phases to move on to. 
*/ public void advanceExperimentPhase() throws Exception { // Safety first if (!started) { throw new Exception("Could not advance experiment phase: service not initialised"); } if (!expStateModel.isExperimentActive()) { throw new Exception("Could not advance experiment phase: no active experiment"); } try { expMonitor.goToNextPhase(); } catch (Exception ex) { String problem = "Could not advance experiment phase: " + ex.getMessage(); logger.error(problem); throw new Exception(problem, ex); } } /** * Use this method to retrieve the currently known connected clients. This * call will return an empty set when there are no clients or no active * experiment. IMPORTANT: the state of each client you find in this set will * be correct only at the point of calling. * * @return - Set of clients currently connected. */ public Set<EMClient> getCurrentlyConnectedClients() { HashSet<EMClient> actuallyConnectedClients = new HashSet<EMClient>(); if (started) { // Get the all clients that the monitor expects to be connected Set<EMClient> clients = expMonitor.getAllConnectedClients(); // Only return those that are not re-registering for (EMClient client : clients) { if (!client.isReRegistering()) { actuallyConnectedClients.add(client); } } } return actuallyConnectedClients; } /** * Use this method to get an instance of a client specified by an ID. * * @param id - UUID of the client required. * @return - Client and its state at the point of calling * @throws Exception - throws if the client ID is invalid or service not * ready */ public EMClient getClientByID(UUID id) throws Exception { if (!started) { throw new Exception("Cannot get client - service not initialised"); } if (id == null) { throw new Exception("Cannot get client - ID is null"); } return expMonitor.getClientByID(id); } /** * Use this call to send a 'deregister' message to a connected client. 
This * instruction informs the client that they should send a disconnection * message to the ECC and then disconnect from the Rabbit service. * * @param client - Client to send the deregister message. * @throws Exception - throws if the client is not known or is already * disconnected from the ECC */ public void deregisterClient(EMClient client) throws Exception { if (!started) { throw new Exception("Cannot deregister client: service not initialised"); } if (client == null) { throw new Exception("Could not deregister client: client is null"); } try { expMonitor.deregisterClient(client, "ECC service has requested de-registration"); } catch (Exception ex) { String problem = "Had problems deregistering client " + client.getName() + ": " + ex.getMessage(); logger.error(problem); throw new Exception(problem, ex); } } /** * Use this method to forcibly remove a client from the service. Note that * the experiment service cannot actually force a client's process to close * or disconnect from the RabbitMQ server. This action will only clean up * references to the client on the service side. 
* * @param client * @throws Exception */ public void forceClientDisconnect(EMClient client) throws Exception { // Safety first if (!started) { throw new Exception("Cannot force client disconnection: service not initialised"); } if (client == null) { throw new Exception("Cannot forcibly disconnect client: client is null"); } try { logger.info("Trying to forcibly remove client " + client.getName() + " from service"); expMonitor.forceClientDisconnection(client); liveMetricScheduler.removeClient(client); } catch (Exception ex) { logger.error("Could not forcibly remove client " + client.getName() + " because: " + ex.getMessage()); } } // Private methods --------------------------------------------------------- private void processLiveMetricData(Report report) throws Exception { // Safety first if (!started) { throw new Exception("Cannot process live metric data: experiment service not initialised"); } if (!expStateModel.isExperimentActive()) { throw new Exception("Cannot process live metric data: no currently active experiment"); } if (report == null) { throw new Exception("Live monitoring metric: report is null"); } // Check to see if we have anything useful store, and try store if (sanitiseMetricReport(report)) { // First get the EDM to save the measurements try { expReportAccessor.saveMeasurements(report); } catch (Exception ex) { throw ex; } } } private boolean sanitiseMetricReport(Report reportOUT) { // Check that we apparently have data Integer nom = reportOUT.getNumberOfMeasurements(); if (nom == null || nom == 0) { logger.warn("Did not process metric report: measurement count = 0"); return false; } // Make sure we have a valid measurement set MeasurementSet clientMS = reportOUT.getMeasurementSet(); if (clientMS == null) { logger.warn("Did not process metric report: Measurement set is null"); return false; } Metric metric = clientMS.getMetric(); if (metric == null) { logger.warn("Did not process metric report: Metric is null"); return false; } MetricType mt = 
metric.getMetricType(); // Sanitise data based on full semantic info MeasurementSet cleanSet = new MeasurementSet(clientMS, false); // Run through each measurement checking that it is sane for (Measurement m : clientMS.getMeasurements()) { String val = m.getValue(); switch (mt) { case NOMINAL: case ORDINAL: if (val != null && !val.isEmpty()) { cleanSet.addMeasurement(m); } break; case INTERVAL: case RATIO: { if (val != null) { try { // Make sure we have a sensible number Double dVal = Double.parseDouble(val); if (!dVal.isNaN() && !dVal.isInfinite()) { cleanSet.addMeasurement(m); } } catch (Exception ex) { logger.warn("Caught NaN value in measurement: dropping"); } } } break; } } // Use update report with clean measurement set reportOUT.setMeasurementSet(cleanSet); reportOUT.setNumberOfMeasurements(cleanSet.getMeasurements().size()); return true; } private void processLivePROVData(EDMProvReport report) throws Exception { // if (livePROVConsumer == null) { // throw new Exception("Could not process PROV report: PROV consumer is null"); // } // // if (report == null) { // throw new Exception("Could not process PROV report: report is null"); // } // // try { // livePROVConsumer.addPROVReport(report); // } catch (Exception ex) { // String msg = "Could not store PROV report: " + ex.getMessage(); // logger.error(msg); // // throw new Exception(msg); // } } // Private classes --------------------------------------------------------- private class ExpLifecycleListener implements IEMLifecycleListener { public ExpLifecycleListener() { } // IEMLifecycleListener ------------------------------------------------ @Override public void onClientConnected(EMClient client, boolean reconnected) { logger.info("Client connected: " + client.getName() + (reconnected ? 
"[reconnection]" : ".")); // If the client is re-registering, do not mark them as connected // just yet; they need to respond in Discovery phase before we know // they are really there if (!client.isReRegistering()) { expStateModel.setClientConnectedState(client, true); } } @Override public void onClientDisconnected(UUID clientID) { EMClient client = expMonitor.getClientByID(clientID); if (client != null) { expStateModel.setClientConnectedState(client, false); logger.info("Client " + client.getName() + " disconnected"); // Stop scheduling metrics from this client if (liveMetricScheduler != null) { try { liveMetricScheduler.removeClient(client); } catch (Exception ex) { logger.warn("Client disconencted; metric scheduler says: " + ex.getMessage()); } } } else { logger.warn("Got a disconnection message from an unknown client"); } } @Override public void onClientStartedPhase(EMClient client, EMPhase phase) { logger.info("Client " + client.getName() + " started phase " + phase.name()); } @Override public void onLifecyclePhaseStarted(EMPhase phase) { logger.info("Experiment lifecycle phase " + phase.name() + " started"); expStateModel.setCurrentPhase(phase); // Perform starting actions, as required switch (phase) { case eEMLiveMonitoring: { liveMetricScheduler.start(expMonitor); } break; case eEMPostMonitoringReport: { liveMetricScheduler.stop(); } break; } } @Override public void onLifecyclePhaseCompleted(EMPhase phase) { logger.info("Experiment lifecycle phase " + phase.name() + " completed"); } @Override public void onNoFurtherLifecyclePhases() { logger.info("No further experiment lifecycle phases"); expStateModel.setCurrentPhase(EMPhase.eEMProtocolComplete); } @Override public void onLifecycleEnded() { logger.info("Experiment lifecycle has ended"); liveMetricScheduler.stop(); liveMetricScheduler.reset(); try { expMonitor.resetLifecycle(); } catch (Exception ex) { logger.error("Could not reset experiment lifecycle: " + ex.getMessage()); } } @Override public void 
onFoundClientWithMetricGenerators(EMClient client, Set<MetricGenerator> newGens) { if (client != null && newGens != null) { if (client.isReRegistering()) { logger.info("Known client connected: " + client.getID() + " (\"" + client.getName() + "\")"); } // Pass on metric generators to the EDM for storage UUID expID = expStateModel.getActiveExperiment().getUUID(); for (MetricGenerator mg : newGens) { // Check metric generator has at least one entity if (!MetricHelper.getAllEntities(mg).isEmpty()) { try { expMetGeneratorDAO.saveMetricGenerator(mg, expID); } catch (Exception ex) { logger.error("Failed to save metric generators for client " + client.getName() + ": " + ex.getMessage()); } } } } else { logger.error("Received invalid metric generator event"); } } @Override public void onClientEnabledMetricCollection(EMClient client, UUID entityID, boolean enabled) { if (client != null && entityID != null) { String msg = "Client " + client + " has " + (enabled ? "enabled" : "disabled") + " metric collection for Entity ID: " + entityID.toString(); logger.info(msg); } else { logger.error("Received invalid metric collection enabling message"); } } @Override public void onClientSetupResult(EMClient client, boolean success) { if (client != null) { logger.info("Client " + client.getName() + " has completed set up"); } } @Override public void onClientDeclaredCanPush(EMClient client) { if (client != null) { logger.info("Client " + client.getName() + " can push"); } } @Override public void onClientDeclaredCanBePulled(EMClient client) { if (client != null) { if (expStateModel.getCurrentPhase().equals(EMPhase.eEMLiveMonitoring)) { try { liveMetricScheduler.addClient(client); } catch (Exception ex) { logger.error("Could not add pulling client to live monitoring: " + ex.getMessage()); } } else { logger.warn("Client " + client.getName() + " trying to start pull process whilst not in Live monitoring"); } } else { logger.warn("Got pull semantics from unknown client"); } } @Override public 
void onGotMetricData(EMClient client, Report report) { if (client != null && report != null) { try { processLiveMetricData(report); } catch (Exception ex) { String problem = "Could not save measurements for client: " + client.getName() + " because: " + ex.getMessage(); logger.error(problem); } } } @Override public void onGotPROVData(EMClient client, EDMProvReport report) { if (report != null) { try { processLivePROVData(report); } catch (Exception ex) { String problem = "Could not save provenance statement for client " + client.getName() + " because: " + ex.getMessage(); logger.error(problem); } } } @Override public void onGotSummaryReport(EMClient client, EMPostReportSummary summary) { if (client != null && summary != null) { try { expMonitor.getAllDataBatches(client); logger.info("Requested missing metric data from " + client.getName()); } catch (Exception ex) { String problem = "Could not request missing metric data from " + client + " because: " + ex.getMessage(); logger.error(problem); } } else { logger.error("Client " + client.getName() + " provided an empty summary report"); } } @Override public void onGotDataBatch(EMClient client, EMDataBatch batch) { if (client != null && batch != null) { try { expReportAccessor.saveReport(batch.getBatchReport(), true); } catch (Exception e) { logger.error("Could not save batch data report: " + e.getMessage()); } } } @Override public void onDataBatchMeasurementSetCompleted(EMClient client, UUID measurementSetID) { if (client != null && measurementSetID != null) { logger.info("Client " + client.getName() + " finished batching for measurement set: " + measurementSetID.toString()); } } @Override public void onAllDataBatchesRequestComplete(EMClient client) { if (client != null) { logger.info("Client " + client.getName() + " has finished batching missing data"); } } @Override public void onClientTearDownResult(EMClient client, boolean success) { if (client != null) { logger.info("Client " + client.getName() + " has finished 
tearing down"); } } } private class LiveMetricsScheduleListener implements LiveMetricSchedulerListener { public LiveMetricsScheduleListener() { } // LiveMetricSchedulerListener ----------------------------------------- @Override public void onIssuedClientMetricPull(EMClient client) { if (client != null) { logger.debug("Issued metric pull on client: " + client.getName()); } } @Override public void onPullMetricFailed(EMClient client, String reason) { if (client != null) { if (reason == null) { reason = "Unknown reason"; } logger.debug("Did not pull client" + client.getName() + ": " + reason); } } } }
Re-enables reconnection of existing clients
eccService/src/main/java/uk/co/soton/itinnovation/ecc/service/services/ExperimentService.java
Re-enables reconnection of existing clients
Java
apache-2.0
53e95788f7ebcd8efbffc633e177105fa0cfdadc
0
tadayosi/camel,JYBESSON/camel,mcollovati/camel,eformat/camel,erwelch/camel,apache/camel,MohammedHammam/camel,CandleCandle/camel,eformat/camel,jpav/camel,drsquidop/camel,nikhilvibhav/camel,anoordover/camel,chirino/camel,RohanHart/camel,jlpedrosa/camel,woj-i/camel,w4tson/camel,grange74/camel,chirino/camel,DariusX/camel,objectiser/camel,sebi-hgdata/camel,manuelh9r/camel,grgrzybek/camel,woj-i/camel,brreitme/camel,mgyongyosi/camel,sverkera/camel,josefkarasek/camel,MrCoder/camel,driseley/camel,nicolaferraro/camel,w4tson/camel,tdiesler/camel,davidkarlsen/camel,scranton/camel,skinzer/camel,noelo/camel,isururanawaka/camel,tlehoux/camel,duro1/camel,borcsokj/camel,lowwool/camel,anton-k11/camel,christophd/camel,scranton/camel,yury-vashchyla/camel,isururanawaka/camel,erwelch/camel,snurmine/camel,maschmid/camel,sirlatrom/camel,sebi-hgdata/camel,stalet/camel,acartapanis/camel,FingolfinTEK/camel,punkhorn/camel-upstream,snurmine/camel,royopa/camel,allancth/camel,jpav/camel,jkorab/camel,mnki/camel,pax95/camel,CodeSmell/camel,scranton/camel,dpocock/camel,cunningt/camel,duro1/camel,johnpoth/camel,davidkarlsen/camel,ssharma/camel,tadayosi/camel,mzapletal/camel,royopa/camel,tlehoux/camel,yogamaha/camel,akhettar/camel,mcollovati/camel,duro1/camel,FingolfinTEK/camel,ekprayas/camel,sabre1041/camel,oalles/camel,prashant2402/camel,askannon/camel,ullgren/camel,josefkarasek/camel,edigrid/camel,dvankleef/camel,arnaud-deprez/camel,driseley/camel,logzio/camel,davidkarlsen/camel,ssharma/camel,brreitme/camel,erwelch/camel,woj-i/camel,tadayosi/camel,jarst/camel,mohanaraosv/camel,alvinkwekel/camel,YoshikiHigo/camel,Thopap/camel,w4tson/camel,mike-kukla/camel,allancth/camel,tkopczynski/camel,bfitzpat/camel,chanakaudaya/camel,qst-jdc-labs/camel,neoramon/camel,jpav/camel,jameszkw/camel,YMartsynkevych/camel,MohammedHammam/camel,yuruki/camel,RohanHart/camel,pax95/camel,logzio/camel,pplatek/camel,jonmcewen/camel,mgyongyosi/camel,partis/camel,maschmid/camel,mohanaraosv/camel,davidwilliams1978/camel,allancth/c
amel,anton-k11/camel,joakibj/camel,sebi-hgdata/camel,NickCis/camel,ullgren/camel,lburgazzoli/apache-camel,haku/camel,iweiss/camel,w4tson/camel,nicolaferraro/camel,partis/camel,dsimansk/camel,Fabryprog/camel,dvankleef/camel,ge0ffrey/camel,bgaudaen/camel,dpocock/camel,drsquidop/camel,tarilabs/camel,tadayosi/camel,onders86/camel,mnki/camel,skinzer/camel,JYBESSON/camel,dsimansk/camel,gnodet/camel,ge0ffrey/camel,MrCoder/camel,mohanaraosv/camel,maschmid/camel,bhaveshdt/camel,curso007/camel,arnaud-deprez/camel,brreitme/camel,rmarting/camel,drsquidop/camel,davidwilliams1978/camel,MrCoder/camel,akhettar/camel,royopa/camel,isururanawaka/camel,logzio/camel,jamesnetherton/camel,cunningt/camel,rparree/camel,gautric/camel,satishgummadelli/camel,yury-vashchyla/camel,pkletsko/camel,anoordover/camel,nikvaessen/camel,tlehoux/camel,ssharma/camel,tkopczynski/camel,jonmcewen/camel,edigrid/camel,acartapanis/camel,rmarting/camel,koscejev/camel,alvinkwekel/camel,gautric/camel,NickCis/camel,jonmcewen/camel,bhaveshdt/camel,veithen/camel,mzapletal/camel,jlpedrosa/camel,atoulme/camel,coderczp/camel,johnpoth/camel,rmarting/camel,josefkarasek/camel,tarilabs/camel,jamesnetherton/camel,alvinkwekel/camel,YMartsynkevych/camel,jpav/camel,christophd/camel,gilfernandes/camel,stalet/camel,pplatek/camel,mgyongyosi/camel,veithen/camel,ekprayas/camel,jkorab/camel,qst-jdc-labs/camel,lowwool/camel,pax95/camel,snadakuduru/camel,tkopczynski/camel,bfitzpat/camel,woj-i/camel,sebi-hgdata/camel,MohammedHammam/camel,CodeSmell/camel,cunningt/camel,mcollovati/camel,Thopap/camel,mzapletal/camel,josefkarasek/camel,dmvolod/camel,anoordover/camel,DariusX/camel,haku/camel,dvankleef/camel,gyc567/camel,snadakuduru/camel,isavin/camel,christophd/camel,mike-kukla/camel,stravag/camel,bdecoste/camel,grgrzybek/camel,stravag/camel,maschmid/camel,hqstevenson/camel,Fabryprog/camel,RohanHart/camel,acartapanis/camel,noelo/camel,apache/camel,cunningt/camel,woj-i/camel,onders86/camel,jmandawg/camel,akhettar/camel,gautric/camel,sirlatrom
/camel,oalles/camel,kevinearls/camel,FingolfinTEK/camel,CandleCandle/camel,jarst/camel,cunningt/camel,bfitzpat/camel,yogamaha/camel,snurmine/camel,gyc567/camel,gyc567/camel,driseley/camel,chanakaudaya/camel,josefkarasek/camel,edigrid/camel,adessaigne/camel,maschmid/camel,koscejev/camel,tadayosi/camel,ramonmaruko/camel,mike-kukla/camel,grgrzybek/camel,CodeSmell/camel,adessaigne/camel,MohammedHammam/camel,bdecoste/camel,CandleCandle/camel,salikjan/camel,lasombra/camel,sverkera/camel,lburgazzoli/apache-camel,atoulme/camel,koscejev/camel,punkhorn/camel-upstream,ssharma/camel,snadakuduru/camel,jameszkw/camel,davidwilliams1978/camel,dpocock/camel,yury-vashchyla/camel,oscerd/camel,isavin/camel,dkhanolkar/camel,mnki/camel,zregvart/camel,yogamaha/camel,mzapletal/camel,bfitzpat/camel,sverkera/camel,rparree/camel,borcsokj/camel,YMartsynkevych/camel,akhettar/camel,mgyongyosi/camel,driseley/camel,Thopap/camel,bhaveshdt/camel,w4tson/camel,nikhilvibhav/camel,grange74/camel,scranton/camel,CodeSmell/camel,logzio/camel,nboukhed/camel,noelo/camel,tkopczynski/camel,yury-vashchyla/camel,apache/camel,DariusX/camel,mike-kukla/camel,brreitme/camel,dvankleef/camel,chanakaudaya/camel,gyc567/camel,tdiesler/camel,bhaveshdt/camel,isururanawaka/camel,onders86/camel,nicolaferraro/camel,ge0ffrey/camel,sebi-hgdata/camel,stravag/camel,jollygeorge/camel,atoulme/camel,johnpoth/camel,ssharma/camel,ullgren/camel,joakibj/camel,isavin/camel,curso007/camel,punkhorn/camel-upstream,brreitme/camel,anoordover/camel,NetNow/camel,veithen/camel,akhettar/camel,davidwilliams1978/camel,lasombra/camel,mcollovati/camel,neoramon/camel,josefkarasek/camel,isavin/camel,ramonmaruko/camel,CandleCandle/camel,oalles/camel,snurmine/camel,manuelh9r/camel,oalles/camel,erwelch/camel,pkletsko/camel,jollygeorge/camel,bhaveshdt/camel,iweiss/camel,dmvolod/camel,sabre1041/camel,jmandawg/camel,lburgazzoli/apache-camel,yogamaha/camel,grange74/camel,nikhilvibhav/camel,yuruki/camel,pax95/camel,acartapanis/camel,NetNow/camel,yuruki/camel,g
nodet/camel,jarst/camel,lburgazzoli/apache-camel,skinzer/camel,mgyongyosi/camel,joakibj/camel,duro1/camel,grgrzybek/camel,manuelh9r/camel,bdecoste/camel,jameszkw/camel,drsquidop/camel,zregvart/camel,manuelh9r/camel,bfitzpat/camel,mohanaraosv/camel,sirlatrom/camel,jmandawg/camel,rparree/camel,tdiesler/camel,grgrzybek/camel,anoordover/camel,askannon/camel,punkhorn/camel-upstream,hqstevenson/camel,eformat/camel,iweiss/camel,ramonmaruko/camel,dsimansk/camel,apache/camel,drsquidop/camel,royopa/camel,nboukhed/camel,askannon/camel,yuruki/camel,qst-jdc-labs/camel,royopa/camel,gnodet/camel,askannon/camel,jamesnetherton/camel,dmvolod/camel,curso007/camel,gilfernandes/camel,NickCis/camel,eformat/camel,arnaud-deprez/camel,lburgazzoli/camel,atoulme/camel,sabre1041/camel,onders86/camel,gilfernandes/camel,oscerd/camel,lasombra/camel,veithen/camel,apache/camel,ge0ffrey/camel,Thopap/camel,sirlatrom/camel,royopa/camel,lowwool/camel,pmoerenhout/camel,lburgazzoli/camel,kevinearls/camel,erwelch/camel,jpav/camel,YMartsynkevych/camel,ekprayas/camel,isavin/camel,iweiss/camel,MrCoder/camel,bgaudaen/camel,nboukhed/camel,NetNow/camel,trohovsky/camel,pax95/camel,grgrzybek/camel,oalles/camel,edigrid/camel,onders86/camel,lowwool/camel,gyc567/camel,YMartsynkevych/camel,logzio/camel,borcsokj/camel,tlehoux/camel,haku/camel,pplatek/camel,ssharma/camel,kevinearls/camel,Thopap/camel,manuelh9r/camel,pmoerenhout/camel,sabre1041/camel,yuruki/camel,tarilabs/camel,lburgazzoli/camel,prashant2402/camel,scranton/camel,rmarting/camel,gilfernandes/camel,JYBESSON/camel,dkhanolkar/camel,grange74/camel,brreitme/camel,gyc567/camel,dpocock/camel,sverkera/camel,ekprayas/camel,atoulme/camel,neoramon/camel,jollygeorge/camel,christophd/camel,logzio/camel,RohanHart/camel,chanakaudaya/camel,gautric/camel,oscerd/camel,joakibj/camel,yogamaha/camel,pax95/camel,lasombra/camel,borcsokj/camel,adessaigne/camel,stalet/camel,iweiss/camel,YoshikiHigo/camel,anoordover/camel,gilfernandes/camel,pplatek/camel,partis/camel,bgaudaen/came
l,yury-vashchyla/camel,chirino/camel,bhaveshdt/camel,jlpedrosa/camel,pmoerenhout/camel,ramonmaruko/camel,borcsokj/camel,nboukhed/camel,tkopczynski/camel,DariusX/camel,dsimansk/camel,mgyongyosi/camel,ge0ffrey/camel,allancth/camel,jmandawg/camel,allancth/camel,sirlatrom/camel,NickCis/camel,salikjan/camel,mnki/camel,satishgummadelli/camel,ge0ffrey/camel,anton-k11/camel,zregvart/camel,objectiser/camel,driseley/camel,joakibj/camel,akhettar/camel,prashant2402/camel,stalet/camel,kevinearls/camel,dmvolod/camel,nikvaessen/camel,adessaigne/camel,pkletsko/camel,jollygeorge/camel,jkorab/camel,Fabryprog/camel,tdiesler/camel,jonmcewen/camel,lburgazzoli/camel,skinzer/camel,driseley/camel,tdiesler/camel,ullgren/camel,pmoerenhout/camel,mohanaraosv/camel,jmandawg/camel,mnki/camel,edigrid/camel,arnaud-deprez/camel,rparree/camel,tkopczynski/camel,jlpedrosa/camel,oscerd/camel,tlehoux/camel,jonmcewen/camel,qst-jdc-labs/camel,dsimansk/camel,coderczp/camel,bdecoste/camel,bgaudaen/camel,lburgazzoli/camel,lburgazzoli/apache-camel,tarilabs/camel,tarilabs/camel,bfitzpat/camel,sverkera/camel,YoshikiHigo/camel,bdecoste/camel,nikvaessen/camel,tdiesler/camel,MohammedHammam/camel,mohanaraosv/camel,curso007/camel,isururanawaka/camel,Fabryprog/camel,trohovsky/camel,MohammedHammam/camel,pmoerenhout/camel,prashant2402/camel,JYBESSON/camel,YMartsynkevych/camel,stravag/camel,jameszkw/camel,acartapanis/camel,davidwilliams1978/camel,pkletsko/camel,partis/camel,prashant2402/camel,chirino/camel,haku/camel,johnpoth/camel,jlpedrosa/camel,stalet/camel,w4tson/camel,jameszkw/camel,oscerd/camel,gautric/camel,dkhanolkar/camel,stravag/camel,dpocock/camel,acartapanis/camel,arnaud-deprez/camel,jollygeorge/camel,koscejev/camel,ramonmaruko/camel,dpocock/camel,anton-k11/camel,JYBESSON/camel,mike-kukla/camel,sabre1041/camel,CandleCandle/camel,Thopap/camel,jarst/camel,gautric/camel,eformat/camel,askannon/camel,gnodet/camel,edigrid/camel,pkletsko/camel,borcsokj/camel,qst-jdc-labs/camel,jarst/camel,noelo/camel,jpav/camel,drs
quidop/camel,nicolaferraro/camel,sverkera/camel,neoramon/camel,partis/camel,partis/camel,JYBESSON/camel,askannon/camel,neoramon/camel,hqstevenson/camel,chanakaudaya/camel,duro1/camel,isururanawaka/camel,grange74/camel,dkhanolkar/camel,YoshikiHigo/camel,yury-vashchyla/camel,MrCoder/camel,jarst/camel,jonmcewen/camel,lowwool/camel,zregvart/camel,dmvolod/camel,jamesnetherton/camel,duro1/camel,lasombra/camel,rparree/camel,jamesnetherton/camel,jlpedrosa/camel,pmoerenhout/camel,tarilabs/camel,pplatek/camel,skinzer/camel,objectiser/camel,grange74/camel,ekprayas/camel,jameszkw/camel,RohanHart/camel,koscejev/camel,coderczp/camel,apache/camel,oalles/camel,koscejev/camel,dmvolod/camel,neoramon/camel,hqstevenson/camel,NickCis/camel,gilfernandes/camel,pkletsko/camel,jkorab/camel,anton-k11/camel,stravag/camel,chirino/camel,dkhanolkar/camel,erwelch/camel,adessaigne/camel,sabre1041/camel,skinzer/camel,veithen/camel,joakibj/camel,kevinearls/camel,nikvaessen/camel,objectiser/camel,dvankleef/camel,rparree/camel,adessaigne/camel,mzapletal/camel,woj-i/camel,chanakaudaya/camel,satishgummadelli/camel,mzapletal/camel,NetNow/camel,FingolfinTEK/camel,eformat/camel,haku/camel,snurmine/camel,johnpoth/camel,arnaud-deprez/camel,yuruki/camel,oscerd/camel,bdecoste/camel,satishgummadelli/camel,curso007/camel,coderczp/camel,NetNow/camel,hqstevenson/camel,satishgummadelli/camel,trohovsky/camel,davidkarlsen/camel,rmarting/camel,trohovsky/camel,gnodet/camel,cunningt/camel,qst-jdc-labs/camel,christophd/camel,jkorab/camel,johnpoth/camel,christophd/camel,nikvaessen/camel,stalet/camel,atoulme/camel,snurmine/camel,iweiss/camel,mnki/camel,trohovsky/camel,bgaudaen/camel,ramonmaruko/camel,hqstevenson/camel,sebi-hgdata/camel,maschmid/camel,kevinearls/camel,chirino/camel,scranton/camel,davidwilliams1978/camel,snadakuduru/camel,NickCis/camel,prashant2402/camel,dkhanolkar/camel,jkorab/camel,FingolfinTEK/camel,jollygeorge/camel,CandleCandle/camel,nikvaessen/camel,coderczp/camel,YoshikiHigo/camel,onders86/camel,alvin
kwekel/camel,NetNow/camel,snadakuduru/camel,curso007/camel,jmandawg/camel,satishgummadelli/camel,lasombra/camel,nikhilvibhav/camel,noelo/camel,sirlatrom/camel,jamesnetherton/camel,lowwool/camel,tlehoux/camel,veithen/camel,noelo/camel,lburgazzoli/apache-camel,rmarting/camel,RohanHart/camel,mike-kukla/camel,yogamaha/camel,anton-k11/camel,pplatek/camel,haku/camel,isavin/camel,pplatek/camel,manuelh9r/camel,bgaudaen/camel,dvankleef/camel,coderczp/camel,nboukhed/camel,allancth/camel,logzio/camel,lburgazzoli/camel,FingolfinTEK/camel,YoshikiHigo/camel,nboukhed/camel,MrCoder/camel,trohovsky/camel,tadayosi/camel,snadakuduru/camel,ekprayas/camel,dsimansk/camel
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.mybatis; import java.util.Iterator; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.impl.DefaultProducer; import org.apache.camel.util.ExchangeHelper; import org.apache.camel.util.ObjectHelper; import org.apache.ibatis.mapping.MappedStatement; import org.apache.ibatis.session.ExecutorType; import org.apache.ibatis.session.SqlSession; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @version */ public class MyBatisProducer extends DefaultProducer { private static final Logger LOG = LoggerFactory.getLogger(MyBatisProducer.class); private String statement; private MyBatisEndpoint endpoint; public MyBatisProducer(MyBatisEndpoint endpoint) { super(endpoint); this.endpoint = endpoint; this.statement = endpoint.getStatement(); } public void process(Exchange exchange) throws Exception { SqlSession session; ExecutorType executorType = endpoint.getExecutorType(); if (executorType == null) { session = endpoint.getSqlSessionFactory().openSession(); } else { session = endpoint.getSqlSessionFactory().openSession(executorType); } try { switch (endpoint.getStatementType()) { case SelectOne: doSelectOne(exchange, session); 
break; case SelectList: doSelectList(exchange, session); break; case Insert: doInsert(exchange, session); break; case InsertList: doInsertList(exchange, session); break; case Update: doUpdate(exchange, session); break; case UpdateList: doUpdateList(exchange, session); break; case Delete: doDelete(exchange, session); break; case DeleteList: doDeleteList(exchange, session); break; default: throw new IllegalArgumentException("Unsupported statementType: " + endpoint.getStatementType()); } // flush the batch statements and commit the database connection session.commit(); } catch (Exception e) { // discard the pending batch statements and roll the database connection back session.rollback(); throw e; } finally { // and finally close the session as we're done session.close(); } } private void doSelectOne(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = exchange.getIn().getBody(); if (in != null) { LOG.trace("SelectOne: {} using statement: {}", in, statement); result = session.selectOne(statement, in); } else { LOG.trace("SelectOne using statement: {}", statement); result = session.selectOne(statement); } doProcessResult(exchange, result, session); } private void doSelectList(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = exchange.getIn().getBody(); if (in != null) { LOG.trace("SelectList: {} using statement: {}", in, statement); result = session.selectList(statement, in); } else { LOG.trace("SelectList using statement: {}", statement); result = session.selectList(statement); } doProcessResult(exchange, result, session); } private void doInsert(Exchange exchange, SqlSession session) throws Exception { Object result; Object in = exchange.getIn().getBody(); if (in != null) { // lets handle arrays or collections of objects Iterator<?> iter = ObjectHelper.createIterator(in); while (iter.hasNext()) { Object value = iter.next(); LOG.trace("Inserting: {} using statement: {}", value, statement); result = 
// NOTE(review): this span begins mid-way through doInsert(); the first statement below is
// the tail of that method's per-element loop — the "result =" assignment target lies
// before this view.
session.insert(statement, value);
                doProcessResult(exchange, result, session);
            }
        } else {
            // no body: execute the statement without a parameter object
            LOG.trace("Inserting using statement: {}", statement);
            result = session.insert(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the insert statement once with the whole message body, leaving any
     * iteration to the statement's own {@code <foreach>} element.
     */
    private void doInsertList(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement
            LOG.trace("Inserting: {} using statement: {}", in, statement);
            result = session.insert(statement, in);
            doProcessResult(exchange, result, session);
        } else {
            LOG.trace("Inserting using statement: {}", statement);
            result = session.insert(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the update statement once per element of the body (arrays and collections
     * are expanded); each invocation's result is processed individually.
     */
    private void doUpdate(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // lets handle arrays or collections of objects
            Iterator<?> iter = ObjectHelper.createIterator(in);
            while (iter.hasNext()) {
                Object value = iter.next();
                LOG.trace("Updating: {} using statement: {}", value, statement);
                result = session.update(statement, value);
                doProcessResult(exchange, result, session);
            }
        } else {
            // no body: execute the statement without a parameter object
            LOG.trace("Updating using statement: {}", statement);
            result = session.update(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the update statement once with the whole message body, leaving any
     * iteration to the statement's own {@code <foreach>} element.
     */
    private void doUpdateList(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement
            LOG.trace("Updating: {} using statement: {}", in, statement);
            result = session.update(statement, in);
            doProcessResult(exchange, result, session);
        } else {
            LOG.trace("Updating using statement: {}", statement);
            result = session.update(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the delete statement once per element of the body (arrays and collections
     * are expanded); each invocation's result is processed individually.
     */
    private void doDelete(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // lets handle arrays or collections of objects
            Iterator<?> iter = ObjectHelper.createIterator(in);
            while (iter.hasNext()) {
                Object value = iter.next();
                LOG.trace("Deleting: {} using statement: {}", value, statement);
                result = session.delete(statement, value);
                doProcessResult(exchange, result, session);
            }
        } else {
            // no body: execute the statement without a parameter object
            LOG.trace("Deleting using statement: {}", statement);
            result = session.delete(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the delete statement once with the whole message body, leaving any
     * iteration to the statement's own {@code <foreach>} element.
     */
    private void doDeleteList(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement
            LOG.trace("Deleting: {} using statement: {}", in, statement);
            result = session.delete(statement, in);
            doProcessResult(exchange, result, session);
        } else {
            LOG.trace("Deleting using statement: {}", statement);
            result = session.delete(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Propagates the statement result to the exchange.
     * <p>
     * For select statement types the result becomes the message body (on the OUT message
     * when the exchange is out-capable, with the IN headers preserved) and is also stored
     * in the MYBATIS_RESULT header. For callable (stored procedure) statements that
     * produced no result, the existing body is kept as-is, because the procedure's output
     * is delivered through its OUT parameters instead. For all other statement types only
     * the result headers are set.
     */
    private void doProcessResult(Exchange exchange, Object result, SqlSession session) {
        if (endpoint.getStatementType() == StatementType.SelectList || endpoint.getStatementType() == StatementType.SelectOne) {
            Message answer = exchange.getIn();
            if (ExchangeHelper.isOutCapable(exchange)) {
                answer = exchange.getOut();
                // preserve headers
                answer.getHeaders().putAll(exchange.getIn().getHeaders());
            }
            // we should not set the body if its a stored procedure as the result is already in its OUT parameter
            MappedStatement ms = session.getConfiguration().getMappedStatement(statement);
            if (ms != null && ms.getStatementType() == org.apache.ibatis.mapping.StatementType.CALLABLE) {
                if (result == null) {
                    LOG.trace("Setting result as existing body as MyBatis statement type is Callable, and there was no result.");
                    answer.setBody(exchange.getIn().getBody());
                } else {
                    // set the result as body for insert
                    LOG.trace("Setting result as body: {}", result);
                    answer.setBody(result);
                }
            } else {
                // set the result as body for insert
                LOG.trace("Setting result as body: {}", result);
                answer.setBody(result);
            }
            answer.setHeader(MyBatisConstants.MYBATIS_RESULT, result);
            answer.setHeader(MyBatisConstants.MYBATIS_STATEMENT_NAME, statement);
        } else {
            Message msg = exchange.getIn();
            msg.setHeader(MyBatisConstants.MYBATIS_RESULT, result);
            msg.setHeader(MyBatisConstants.MYBATIS_STATEMENT_NAME, statement);
        }
    }
}
components/camel-mybatis/src/main/java/org/apache/camel/component/mybatis/MyBatisProducer.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.mybatis;

import java.util.Iterator;

import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Producer that executes the endpoint's configured MyBatis statement against a freshly
 * opened {@link SqlSession} for every exchange, committing on success and rolling back
 * on failure.
 * <p>
 * FIX (see CAMEL-7432): {@code doProcessResult} no longer overwrites the message body
 * when a callable (stored procedure) statement returns no result — the procedure's
 * output is already delivered through its OUT parameters, so the existing body is kept.
 *
 * @version
 */
public class MyBatisProducer extends DefaultProducer {

    private static final Logger LOG = LoggerFactory.getLogger(MyBatisProducer.class);

    /** Id of the MyBatis statement to execute, resolved once from the endpoint. */
    private final String statement;
    private final MyBatisEndpoint endpoint;

    public MyBatisProducer(MyBatisEndpoint endpoint) {
        super(endpoint);
        this.endpoint = endpoint;
        this.statement = endpoint.getStatement();
    }

    /**
     * Opens a session (honoring any configured {@link ExecutorType}), dispatches to the
     * handler matching the endpoint's statement type, then commits; any failure rolls
     * the session back, and the session is always closed.
     */
    @Override
    public void process(Exchange exchange) throws Exception {
        SqlSession session;

        ExecutorType executorType = endpoint.getExecutorType();
        if (executorType == null) {
            session = endpoint.getSqlSessionFactory().openSession();
        } else {
            session = endpoint.getSqlSessionFactory().openSession(executorType);
        }

        try {
            switch (endpoint.getStatementType()) {
            case SelectOne:
                doSelectOne(exchange, session);
                break;
            case SelectList:
                doSelectList(exchange, session);
                break;
            case Insert:
                doInsert(exchange, session);
                break;
            case InsertList:
                doInsertList(exchange, session);
                break;
            case Update:
                doUpdate(exchange, session);
                break;
            case UpdateList:
                doUpdateList(exchange, session);
                break;
            case Delete:
                doDelete(exchange, session);
                break;
            case DeleteList:
                doDeleteList(exchange, session);
                break;
            default:
                throw new IllegalArgumentException("Unsupported statementType: " + endpoint.getStatementType());
            }
            // flush the batch statements and commit the database connection
            session.commit();
        } catch (Exception e) {
            // discard the pending batch statements and roll the database connection back
            session.rollback();
            throw e;
        } finally {
            // and finally close the session as we're done
            session.close();
        }
    }

    /** Executes the select-one statement, with the body (if any) as the parameter. */
    private void doSelectOne(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            LOG.trace("SelectOne: {} using statement: {}", in, statement);
            result = session.selectOne(statement, in);
        } else {
            LOG.trace("SelectOne using statement: {}", statement);
            result = session.selectOne(statement);
        }
        doProcessResult(exchange, result, session);
    }

    /** Executes the select-list statement, with the body (if any) as the parameter. */
    private void doSelectList(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            LOG.trace("SelectList: {} using statement: {}", in, statement);
            result = session.selectList(statement, in);
        } else {
            LOG.trace("SelectList using statement: {}", statement);
            result = session.selectList(statement);
        }
        doProcessResult(exchange, result, session);
    }

    /**
     * Executes the insert statement once per element of the body (arrays and collections
     * are expanded); each invocation's result is processed individually.
     */
    private void doInsert(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // lets handle arrays or collections of objects
            Iterator<?> iter = ObjectHelper.createIterator(in);
            while (iter.hasNext()) {
                Object value = iter.next();
                LOG.trace("Inserting: {} using statement: {}", value, statement);
                result = session.insert(statement, value);
                doProcessResult(exchange, result, session);
            }
        } else {
            LOG.trace("Inserting using statement: {}", statement);
            result = session.insert(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the insert statement once with the whole message body, leaving any
     * iteration to the statement's own {@code <foreach>} element.
     */
    private void doInsertList(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement
            LOG.trace("Inserting: {} using statement: {}", in, statement);
            result = session.insert(statement, in);
            doProcessResult(exchange, result, session);
        } else {
            LOG.trace("Inserting using statement: {}", statement);
            result = session.insert(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the update statement once per element of the body (arrays and collections
     * are expanded); each invocation's result is processed individually.
     */
    private void doUpdate(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // lets handle arrays or collections of objects
            Iterator<?> iter = ObjectHelper.createIterator(in);
            while (iter.hasNext()) {
                Object value = iter.next();
                LOG.trace("Updating: {} using statement: {}", value, statement);
                result = session.update(statement, value);
                doProcessResult(exchange, result, session);
            }
        } else {
            LOG.trace("Updating using statement: {}", statement);
            result = session.update(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the update statement once with the whole message body, leaving any
     * iteration to the statement's own {@code <foreach>} element.
     */
    private void doUpdateList(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement
            LOG.trace("Updating: {} using statement: {}", in, statement);
            result = session.update(statement, in);
            doProcessResult(exchange, result, session);
        } else {
            LOG.trace("Updating using statement: {}", statement);
            result = session.update(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the delete statement once per element of the body (arrays and collections
     * are expanded); each invocation's result is processed individually.
     */
    private void doDelete(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // lets handle arrays or collections of objects
            Iterator<?> iter = ObjectHelper.createIterator(in);
            while (iter.hasNext()) {
                Object value = iter.next();
                LOG.trace("Deleting: {} using statement: {}", value, statement);
                result = session.delete(statement, value);
                doProcessResult(exchange, result, session);
            }
        } else {
            LOG.trace("Deleting using statement: {}", statement);
            result = session.delete(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Executes the delete statement once with the whole message body, leaving any
     * iteration to the statement's own {@code <foreach>} element.
     */
    private void doDeleteList(Exchange exchange, SqlSession session) throws Exception {
        Object result;
        Object in = exchange.getIn().getBody();
        if (in != null) {
            // just pass in the body as Object and allow MyBatis to iterate using its own foreach statement
            LOG.trace("Deleting: {} using statement: {}", in, statement);
            result = session.delete(statement, in);
            doProcessResult(exchange, result, session);
        } else {
            LOG.trace("Deleting using statement: {}", statement);
            result = session.delete(statement);
            doProcessResult(exchange, result, session);
        }
    }

    /**
     * Propagates the statement result to the exchange.
     * <p>
     * For select statement types the result becomes the message body (on the OUT message
     * when the exchange is out-capable, with the IN headers preserved) and is also stored
     * in the MYBATIS_RESULT header. For callable (stored procedure) statements that
     * produced no result, the existing body is kept as-is because the procedure's output
     * is delivered through its OUT parameters. For all other statement types only the
     * result headers are set.
     */
    private void doProcessResult(Exchange exchange, Object result, SqlSession session) {
        if (endpoint.getStatementType() == StatementType.SelectList || endpoint.getStatementType() == StatementType.SelectOne) {
            Message answer = exchange.getIn();
            if (ExchangeHelper.isOutCapable(exchange)) {
                answer = exchange.getOut();
                // preserve headers
                answer.getHeaders().putAll(exchange.getIn().getHeaders());
            }

            // we should not set the body if its a stored procedure as the result is already in its OUT parameter
            MappedStatement ms = session.getConfiguration().getMappedStatement(statement);
            if (ms != null && ms.getStatementType() == org.apache.ibatis.mapping.StatementType.CALLABLE) {
                if (result == null) {
                    LOG.trace("Setting result as existing body as MyBatis statement type is Callable, and there was no result.");
                    answer.setBody(exchange.getIn().getBody());
                } else {
                    LOG.trace("Setting result as body: {}", result);
                    answer.setBody(result);
                }
            } else {
                LOG.trace("Setting result as body: {}", result);
                answer.setBody(result);
            }

            answer.setHeader(MyBatisConstants.MYBATIS_RESULT, result);
            answer.setHeader(MyBatisConstants.MYBATIS_STATEMENT_NAME, statement);
        } else {
            Message msg = exchange.getIn();
            msg.setHeader(MyBatisConstants.MYBATIS_RESULT, result);
            msg.setHeader(MyBatisConstants.MYBATIS_STATEMENT_NAME, statement);
        }
    }
}
CAMEL-7432: camel-mybatis should keep message body as-is if calling stored procedure and it did not return data, but are using OUT parameters etc.
components/camel-mybatis/src/main/java/org/apache/camel/component/mybatis/MyBatisProducer.java
CAMEL-7432: camel-mybatis should keep message body as-is if calling stored procedure and it did not return data, but are using OUT parameters etc.
Java
apache-2.0
10caf695ad8128384759683552b8d270e67d00d1
0
oehf/ipf,oehf/ipf,oehf/ipf,oehf/ipf
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openehealth.ipf.commons.ihe.xua;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.cxf.binding.soap.SoapMessage;
import org.apache.cxf.headers.Header;
import org.openehealth.ipf.commons.ihe.ws.cxf.audit.AbstractAuditInterceptor;
import org.openehealth.ipf.commons.ihe.ws.cxf.audit.WsAuditDataset;
import org.openehealth.ipf.commons.ihe.ws.cxf.audit.XuaProcessor;
import org.openhealthtools.ihe.atna.auditor.models.rfc3881.CodedValueType;
import org.opensaml.core.config.ConfigurationService;
import org.opensaml.core.config.InitializationException;
import org.opensaml.core.config.InitializationService;
import org.opensaml.core.xml.XMLObject;
import org.opensaml.core.xml.config.XMLObjectProviderRegistry;
import org.opensaml.core.xml.io.Unmarshaller;
import org.opensaml.core.xml.io.UnmarshallerFactory;
import org.opensaml.core.xml.io.UnmarshallingException;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.saml2.core.Assertion;
import org.opensaml.saml.saml2.core.Attribute;
import org.opensaml.saml.saml2.core.AttributeStatement;
import org.opensaml.soap.wssecurity.WSSecurityConstants;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import static org.openehealth.ipf.commons.ihe.ws.utils.SoapUtils.*;

import javax.xml.namespace.QName;
import java.util.*;

/**
 * Extracts XUA-relevant data (user name, purposes of use, user roles, patient id) from the
 * SAML 2.0 assertion carried in a message's WS-Security SOAP header and stores it in the
 * ATNA audit dataset.
 *
 * @see <a href="http://docs.oasis-open.org/xacml/xspa/v1.0/xacml-xspa-1.0-os.html">Cross-Enterprise Security
 * and Privacy Authorization (XSPA) Profile of XACML v2.0 for Healthcare Version 1.0</a>
 *
 * @author Dmytro Rud
 */
@Slf4j
public class BasicXuaProcessor implements XuaProcessor {

    /**
     * If a SAML assertion is stored under this key in the Web Service context,
     * IPF will use it instead of parsing the WS-Security header by itself.
     * If there are no Web Service context element under this key, or if this element
     * does not contain a SAML assertion, IPF will parse the WS-Security header
     * and store the assertion extracted from there (if any) under this key.
     */
    public static final String XUA_SAML_ASSERTION = AbstractAuditInterceptor.class.getName() + ".XUA_SAML_ASSERTION";

    // accepted WS-Security header namespaces (WSSE 1.0 and 1.1)
    public static final Set<String> WSSE_NS_URIS = new HashSet<>(Arrays.asList(
            WSSecurityConstants.WSSE_NS,
            WSSecurityConstants.WSSE11_NS));

    // XSPA/XACML SAML attribute names of interest
    public static final String PURPOSE_OF_USE_ATTRIBUTE_NAME = "urn:oasis:names:tc:xspa:1.0:subject:purposeofuse";
    public static final String SUBJECT_ROLE_ATTRIBUTE_NAME = "urn:oasis:names:tc:xacml:2.0:subject:role";
    public static final String PATIENT_ID_ATTRIBUTE_NAME = "urn:oasis:names:tc:xacml:2.0:resource:resource-id";

    // HL7v3 coded elements nested in the attribute values above
    public static final QName PURPOSE_OF_USE_ELEMENT_NAME = new QName("urn:hl7-org:v3", "PurposeOfUse");
    public static final QName SUBJECT_ROLE_ELEMENT_NAME = new QName("urn:hl7-org:v3", "Role");

    private static final UnmarshallerFactory SAML_UNMARSHALLER_FACTORY;

    static {
        // one-time OpenSAML bootstrap; failure here is unrecoverable for this class
        try {
            InitializationService.initialize();
            XMLObjectProviderRegistry registry = ConfigurationService.get(XMLObjectProviderRegistry.class);
            SAML_UNMARSHALLER_FACTORY = registry.getUnmarshallerFactory();
        } catch (InitializationException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the first SAML 2.0 Assertion DOM element found in the CXF-parsed WS-Security
     * header with the given direction, or {@code null} when absent.
     */
    private static Element extractAssertionElementFromCxfMessage(SoapMessage message, Header.Direction headerDirection) {
        Header header = message.getHeader(new QName(WSSecurityConstants.WSSE_NS, "Security"));
        if (!((header != null)
                && headerDirection.equals(header.getDirection())
                && (header.getObject() instanceof Element))) {
            return null;
        }
        Element headerElem = (Element) header.getObject();
        NodeList nodeList = headerElem.getElementsByTagNameNS(SAMLConstants.SAML20_NS, "Assertion");
        return (Element) nodeList.item(0);
    }

    /**
     * Fallback: walks the raw SOAP DOM (Header/Security/Assertion) when the assertion was
     * not available via the CXF header model. Returns {@code null} when absent.
     */
    private static Element extractAssertionElementFromDom(SoapMessage message) {
        Document document = (Document) message.getContent(Node.class);
        if (document == null) {
            return null;
        }
        Element element = getElementNS(document.getDocumentElement(), SOAP_NS_URIS, "Header");
        element = getElementNS(element, WSSE_NS_URIS, "Security");
        return getElementNS(element, Collections.singleton(SAMLConstants.SAML20_NS), "Assertion");
    }

    /**
     * Extracts ITI-40 XUA user name from the SAML2 assertion contained
     * in the given CXF message, and stores it in the ATNA audit dataset.
     *
     * @param message source CXF message.
     * @param headerDirection direction of the header containing the SAML2 assertion.
     * @param auditDataset target ATNA audit dataset.
     */
    public void extractXuaUserNameFromSaml2Assertion(
            SoapMessage message,
            Header.Direction headerDirection,
            WsAuditDataset auditDataset) {
        Assertion assertion = null;

        // check whether someone has already parsed the SAML2 assertion
        Object o = message.getContextualProperty(XUA_SAML_ASSERTION);
        if (o instanceof Assertion) {
            assertion = (Assertion) o;
        }

        // extract SAML assertion the from WS-Security SOAP header
        if (assertion == null) {
            Element assertionElem = extractAssertionElementFromCxfMessage(message, headerDirection);
            if (assertionElem == null) {
                assertionElem = extractAssertionElementFromDom(message);
            }
            if (assertionElem == null) {
                // no assertion present at all -- nothing to audit
                return;
            }
            Unmarshaller unmarshaller = SAML_UNMARSHALLER_FACTORY.getUnmarshaller(assertionElem);
            try {
                assertion = (Assertion) unmarshaller.unmarshall(assertionElem);
            } catch (UnmarshallingException e) {
                log.warn("Cannot extract SAML assertion from the WS-Security SOAP header", e);
                return;
            }
            // cache the parsed assertion for downstream processors
            message.getExchange().put(XUA_SAML_ASSERTION, assertion);
        }

        // set ATNA XUA userName element
        String userName = ((assertion.getSubject() != null) && (assertion.getSubject().getNameID() != null))
                ? assertion.getSubject().getNameID().getValue() : null;

        String issuer = (assertion.getIssuer() != null)
                ? assertion.getIssuer().getValue() : null;

        // user name is only recorded when both issuer and subject name are present
        if (StringUtils.isNoneEmpty(issuer, userName)) {
            String spProvidedId = StringUtils.stripToEmpty(assertion.getSubject().getNameID().getSPProvidedID());
            auditDataset.setUserName(spProvidedId + '<' + userName + '@' + issuer + '>');
        }

        // collect purposes of use, user role codes, and the patient ID
        for (AttributeStatement statement : assertion.getAttributeStatements()) {
            for (Attribute attribute : statement.getAttributes()) {
                if (PURPOSE_OF_USE_ATTRIBUTE_NAME.equals(attribute.getName())) {
                    extractCodes(attribute, PURPOSE_OF_USE_ELEMENT_NAME, auditDataset.getPurposesOfUse());
                } else if (SUBJECT_ROLE_ATTRIBUTE_NAME.equals(attribute.getName())) {
                    extractCodes(attribute, SUBJECT_ROLE_ELEMENT_NAME, auditDataset.getUserRoles());
                } else if (PATIENT_ID_ATTRIBUTE_NAME.equals(attribute.getName())) {
                    // only the first attribute value is considered for the patient id
                    List<XMLObject> attributeValues = attribute.getAttributeValues();
                    if ((attributeValues != null)
                            && (!attributeValues.isEmpty())
                            && (attributeValues.get(0) != null)
                            && (attributeValues.get(0).getDOM() != null)) {
                        auditDataset.setXuaPatientId(attributeValues.get(0).getDOM().getTextContent());
                    }
                }
            }
        }
    }

    /**
     * Converts every nested HL7v3 coded element with the given qualified name inside the
     * attribute's values into a {@link CodedValueType} and adds it to the target list.
     */
    private static void extractCodes(Attribute attribute, QName valueElementName, List<CodedValueType> targetCollection) {
        for (XMLObject value : attribute.getAttributeValues()) {
            if (value.getDOM() != null) {
                NodeList nodeList = value.getDOM().getElementsByTagNameNS(valueElementName.getNamespaceURI(), valueElementName.getLocalPart());
                for (int i = 0; i < nodeList.getLength(); ++i) {
                    Element elem = (Element) nodeList.item(i);
                    targetCollection.add(elementToCode(elem));
                }
            }
        }
    }

    /**
     * Maps an HL7v3 coded element's XML attributes (code / codeSystem / displayName)
     * onto an RFC 3881 coded value.
     */
    private static CodedValueType elementToCode(Element element) {
        CodedValueType cvt = new CodedValueType();
        cvt.setCode(element.getAttribute("code"));
        cvt.setCodeSystemName(element.getAttribute("codeSystem"));
        cvt.setOriginalText(element.getAttribute("displayName"));
        return cvt;
    }
}
commons/ihe/xua/src/main/java/org/openehealth/ipf/commons/ihe/xua/BasicXuaProcessor.java
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openehealth.ipf.commons.ihe.xua;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.cxf.binding.soap.SoapMessage;
import org.apache.cxf.headers.Header;
import org.openehealth.ipf.commons.ihe.ws.cxf.audit.AbstractAuditInterceptor;
import org.openehealth.ipf.commons.ihe.ws.cxf.audit.WsAuditDataset;
import org.openehealth.ipf.commons.ihe.ws.cxf.audit.XuaProcessor;
import org.openhealthtools.ihe.atna.auditor.models.rfc3881.CodedValueType;
import org.opensaml.core.config.ConfigurationService;
import org.opensaml.core.config.InitializationException;
import org.opensaml.core.config.InitializationService;
import org.opensaml.core.xml.XMLObject;
import org.opensaml.core.xml.config.XMLObjectProviderRegistry;
import org.opensaml.core.xml.io.Unmarshaller;
import org.opensaml.core.xml.io.UnmarshallerFactory;
import org.opensaml.core.xml.io.UnmarshallingException;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.saml2.core.Assertion;
import org.opensaml.saml.saml2.core.Attribute;
import org.opensaml.saml.saml2.core.AttributeStatement;
import org.opensaml.soap.wssecurity.WSSecurityConstants;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import static org.openehealth.ipf.commons.ihe.ws.utils.SoapUtils.*;

import javax.xml.namespace.QName;
import java.util.*;

/**
 * Extracts XUA-relevant data (user name, purposes of use, user roles, patient id) from the
 * SAML 2.0 assertion carried in a message's WS-Security SOAP header and stores it in the
 * ATNA audit dataset.
 * <p>
 * FIX: {@code elementToCode} now reads the coding-system id from the mandatory HL7v3
 * {@code codeSystem} XML attribute instead of the optional {@code codeSystemName}
 * attribute, which is only a human-readable name and may be absent.
 *
 * @see <a href="http://docs.oasis-open.org/xacml/xspa/v1.0/xacml-xspa-1.0-os.html">Cross-Enterprise Security
 * and Privacy Authorization (XSPA) Profile of XACML v2.0 for Healthcare Version 1.0</a>
 *
 * @author Dmytro Rud
 */
@Slf4j
public class BasicXuaProcessor implements XuaProcessor {

    /**
     * If a SAML assertion is stored under this key in the Web Service context,
     * IPF will use it instead of parsing the WS-Security header by itself.
     * If there are no Web Service context element under this key, or if this element
     * does not contain a SAML assertion, IPF will parse the WS-Security header
     * and store the assertion extracted from there (if any) under this key.
     */
    public static final String XUA_SAML_ASSERTION = AbstractAuditInterceptor.class.getName() + ".XUA_SAML_ASSERTION";

    // accepted WS-Security header namespaces (WSSE 1.0 and 1.1)
    public static final Set<String> WSSE_NS_URIS = new HashSet<>(Arrays.asList(
            WSSecurityConstants.WSSE_NS,
            WSSecurityConstants.WSSE11_NS));

    // XSPA/XACML SAML attribute names of interest
    public static final String PURPOSE_OF_USE_ATTRIBUTE_NAME = "urn:oasis:names:tc:xspa:1.0:subject:purposeofuse";
    public static final String SUBJECT_ROLE_ATTRIBUTE_NAME = "urn:oasis:names:tc:xacml:2.0:subject:role";
    public static final String PATIENT_ID_ATTRIBUTE_NAME = "urn:oasis:names:tc:xacml:2.0:resource:resource-id";

    // HL7v3 coded elements nested in the attribute values above
    public static final QName PURPOSE_OF_USE_ELEMENT_NAME = new QName("urn:hl7-org:v3", "PurposeOfUse");
    public static final QName SUBJECT_ROLE_ELEMENT_NAME = new QName("urn:hl7-org:v3", "Role");

    private static final UnmarshallerFactory SAML_UNMARSHALLER_FACTORY;

    static {
        // one-time OpenSAML bootstrap; failure here is unrecoverable for this class
        try {
            InitializationService.initialize();
            XMLObjectProviderRegistry registry = ConfigurationService.get(XMLObjectProviderRegistry.class);
            SAML_UNMARSHALLER_FACTORY = registry.getUnmarshallerFactory();
        } catch (InitializationException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the first SAML 2.0 Assertion DOM element found in the CXF-parsed WS-Security
     * header with the given direction, or {@code null} when absent.
     */
    private static Element extractAssertionElementFromCxfMessage(SoapMessage message, Header.Direction headerDirection) {
        Header header = message.getHeader(new QName(WSSecurityConstants.WSSE_NS, "Security"));
        if (!((header != null)
                && headerDirection.equals(header.getDirection())
                && (header.getObject() instanceof Element))) {
            return null;
        }
        Element headerElem = (Element) header.getObject();
        NodeList nodeList = headerElem.getElementsByTagNameNS(SAMLConstants.SAML20_NS, "Assertion");
        return (Element) nodeList.item(0);
    }

    /**
     * Fallback: walks the raw SOAP DOM (Header/Security/Assertion) when the assertion was
     * not available via the CXF header model. Returns {@code null} when absent.
     */
    private static Element extractAssertionElementFromDom(SoapMessage message) {
        Document document = (Document) message.getContent(Node.class);
        if (document == null) {
            return null;
        }
        Element element = getElementNS(document.getDocumentElement(), SOAP_NS_URIS, "Header");
        element = getElementNS(element, WSSE_NS_URIS, "Security");
        return getElementNS(element, Collections.singleton(SAMLConstants.SAML20_NS), "Assertion");
    }

    /**
     * Extracts ITI-40 XUA user name from the SAML2 assertion contained
     * in the given CXF message, and stores it in the ATNA audit dataset.
     *
     * @param message source CXF message.
     * @param headerDirection direction of the header containing the SAML2 assertion.
     * @param auditDataset target ATNA audit dataset.
     */
    public void extractXuaUserNameFromSaml2Assertion(
            SoapMessage message,
            Header.Direction headerDirection,
            WsAuditDataset auditDataset) {
        Assertion assertion = null;

        // check whether someone has already parsed the SAML2 assertion
        Object o = message.getContextualProperty(XUA_SAML_ASSERTION);
        if (o instanceof Assertion) {
            assertion = (Assertion) o;
        }

        // extract SAML assertion the from WS-Security SOAP header
        if (assertion == null) {
            Element assertionElem = extractAssertionElementFromCxfMessage(message, headerDirection);
            if (assertionElem == null) {
                assertionElem = extractAssertionElementFromDom(message);
            }
            if (assertionElem == null) {
                // no assertion present at all -- nothing to audit
                return;
            }
            Unmarshaller unmarshaller = SAML_UNMARSHALLER_FACTORY.getUnmarshaller(assertionElem);
            try {
                assertion = (Assertion) unmarshaller.unmarshall(assertionElem);
            } catch (UnmarshallingException e) {
                log.warn("Cannot extract SAML assertion from the WS-Security SOAP header", e);
                return;
            }
            // cache the parsed assertion for downstream processors
            message.getExchange().put(XUA_SAML_ASSERTION, assertion);
        }

        // set ATNA XUA userName element
        String userName = ((assertion.getSubject() != null) && (assertion.getSubject().getNameID() != null))
                ? assertion.getSubject().getNameID().getValue() : null;

        String issuer = (assertion.getIssuer() != null)
                ? assertion.getIssuer().getValue() : null;

        // user name is only recorded when both issuer and subject name are present
        if (StringUtils.isNoneEmpty(issuer, userName)) {
            String spProvidedId = StringUtils.stripToEmpty(assertion.getSubject().getNameID().getSPProvidedID());
            auditDataset.setUserName(spProvidedId + '<' + userName + '@' + issuer + '>');
        }

        // collect purposes of use, user role codes, and the patient ID
        for (AttributeStatement statement : assertion.getAttributeStatements()) {
            for (Attribute attribute : statement.getAttributes()) {
                if (PURPOSE_OF_USE_ATTRIBUTE_NAME.equals(attribute.getName())) {
                    extractCodes(attribute, PURPOSE_OF_USE_ELEMENT_NAME, auditDataset.getPurposesOfUse());
                } else if (SUBJECT_ROLE_ATTRIBUTE_NAME.equals(attribute.getName())) {
                    extractCodes(attribute, SUBJECT_ROLE_ELEMENT_NAME, auditDataset.getUserRoles());
                } else if (PATIENT_ID_ATTRIBUTE_NAME.equals(attribute.getName())) {
                    // only the first attribute value is considered for the patient id
                    List<XMLObject> attributeValues = attribute.getAttributeValues();
                    if ((attributeValues != null)
                            && (!attributeValues.isEmpty())
                            && (attributeValues.get(0) != null)
                            && (attributeValues.get(0).getDOM() != null)) {
                        auditDataset.setXuaPatientId(attributeValues.get(0).getDOM().getTextContent());
                    }
                }
            }
        }
    }

    /**
     * Converts every nested HL7v3 coded element with the given qualified name inside the
     * attribute's values into a {@link CodedValueType} and adds it to the target list.
     */
    private static void extractCodes(Attribute attribute, QName valueElementName, List<CodedValueType> targetCollection) {
        for (XMLObject value : attribute.getAttributeValues()) {
            if (value.getDOM() != null) {
                NodeList nodeList = value.getDOM().getElementsByTagNameNS(valueElementName.getNamespaceURI(), valueElementName.getLocalPart());
                for (int i = 0; i < nodeList.getLength(); ++i) {
                    Element elem = (Element) nodeList.item(i);
                    targetCollection.add(elementToCode(elem));
                }
            }
        }
    }

    /**
     * Maps an HL7v3 coded element's XML attributes onto an RFC 3881 coded value.
     * The coding scheme is taken from the mandatory {@code codeSystem} attribute
     * (previously the optional {@code codeSystemName} attribute was read by mistake).
     */
    private static CodedValueType elementToCode(Element element) {
        CodedValueType cvt = new CodedValueType();
        cvt.setCode(element.getAttribute("code"));
        cvt.setCodeSystemName(element.getAttribute("codeSystem"));
        cvt.setOriginalText(element.getAttribute("displayName"));
        return cvt;
    }
}
fix XUA processor
commons/ihe/xua/src/main/java/org/openehealth/ipf/commons/ihe/xua/BasicXuaProcessor.java
fix XUA processor
Java
apache-2.0
7de14775ee1298bfe133c33efdfe9614f1039339
0
debezium/debezium,debezium/debezium,debezium/debezium,debezium/debezium
/*
 * Copyright Debezium Authors.
 *
 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
 */
package io.debezium.connector.oracle;

import java.time.Instant;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;

import io.debezium.connector.SnapshotRecord;
import io.debezium.pipeline.source.snapshot.incremental.IncrementalSnapshotContext;
import io.debezium.pipeline.spi.OffsetContext;
import io.debezium.pipeline.txmetadata.TransactionContext;
import io.debezium.relational.TableId;
import io.debezium.schema.DataCollectionId;

/**
 * Kafka Connect offset handling for the Oracle connector: tracks the current SCN / commit
 * SCN (or XStream LCR position), snapshot progress, and the bookkeeping needed to resume
 * a consistent snapshot (snapshot SCN and transactions pending at snapshot time).
 * <p>
 * Changes vs. previous revision: the duplicated serialization of the snapshot bookkeeping
 * entries in {@link #getOffset()} is extracted into a helper, and the snapshot-branch SCN
 * entry now uses an explicit {@code null} (previously {@code : scn}, which was also
 * {@code null} there but obscured the intent).
 */
public class OracleOffsetContext implements OffsetContext {

    public static final String SNAPSHOT_COMPLETED_KEY = "snapshot_completed";
    public static final String SNAPSHOT_PENDING_TRANSACTIONS_KEY = "snapshot_pending_tx";
    public static final String SNAPSHOT_SCN_KEY = "snapshot_scn";

    private final Schema sourceInfoSchema;
    private final SourceInfo sourceInfo;
    private final TransactionContext transactionContext;
    private final IncrementalSnapshotContext<TableId> incrementalSnapshotContext;

    /**
     * SCN that was used for the initial consistent snapshot.
     *
     * We keep track of this field because it's a cutoff for emitting DDL statements,
     * in case we start mining _before_ the snapshot SCN to cover transactions that were
     * ongoing at the time the snapshot was taken.
     */
    private final Scn snapshotScn;

    /**
     * Map of (txid, start SCN) for all transactions in progress at the time the
     * snapshot was taken.
     */
    private Map<String, Scn> snapshotPendingTransactions;

    /**
     * Whether a snapshot has been completed or not.
     */
    private boolean snapshotCompleted;

    public OracleOffsetContext(OracleConnectorConfig connectorConfig, Scn scn, Scn commitScn, String lcrPosition,
                               Scn snapshotScn, Map<String, Scn> snapshotPendingTransactions,
                               boolean snapshot, boolean snapshotCompleted, TransactionContext transactionContext,
                               IncrementalSnapshotContext<TableId> incrementalSnapshotContext) {
        this(connectorConfig, scn, lcrPosition, snapshotScn, snapshotPendingTransactions,
                snapshot, snapshotCompleted, transactionContext, incrementalSnapshotContext);
        sourceInfo.setCommitScn(commitScn);
    }

    public OracleOffsetContext(OracleConnectorConfig connectorConfig, Scn scn, String lcrPosition,
                               Scn snapshotScn, Map<String, Scn> snapshotPendingTransactions,
                               boolean snapshot, boolean snapshotCompleted, TransactionContext transactionContext,
                               IncrementalSnapshotContext<TableId> incrementalSnapshotContext) {
        sourceInfo = new SourceInfo(connectorConfig);
        sourceInfo.setScn(scn);
        sourceInfo.setLcrPosition(lcrPosition);
        sourceInfoSchema = sourceInfo.schema();

        // Snapshot SCN is a new field and may be null in cases where the offsets are being read from
        // and older version of Debezium. In this case, we need to explicitly enforce Scn#NULL usage
        // when the value is null.
        this.snapshotScn = snapshotScn == null ? Scn.NULL : snapshotScn;
        this.snapshotPendingTransactions = snapshotPendingTransactions;

        this.transactionContext = transactionContext;
        this.incrementalSnapshotContext = incrementalSnapshotContext;

        this.snapshotCompleted = snapshotCompleted;
        if (this.snapshotCompleted) {
            postSnapshotCompletion();
        }
        else {
            sourceInfo.setSnapshot(snapshot ? SnapshotRecord.TRUE : SnapshotRecord.FALSE);
        }
    }

    /** Fluent builder for {@link OracleOffsetContext}; obtain via {@link #create()}. */
    public static class Builder {

        private OracleConnectorConfig connectorConfig;
        private Scn scn;
        private String lcrPosition;
        private boolean snapshot;
        private boolean snapshotCompleted;
        private TransactionContext transactionContext;
        private IncrementalSnapshotContext<TableId> incrementalSnapshotContext;
        private Map<String, Scn> snapshotPendingTransactions;
        private Scn snapshotScn;

        public Builder logicalName(OracleConnectorConfig connectorConfig) {
            this.connectorConfig = connectorConfig;
            return this;
        }

        public Builder scn(Scn scn) {
            this.scn = scn;
            return this;
        }

        public Builder lcrPosition(String lcrPosition) {
            this.lcrPosition = lcrPosition;
            return this;
        }

        public Builder snapshot(boolean snapshot) {
            this.snapshot = snapshot;
            return this;
        }

        public Builder snapshotCompleted(boolean snapshotCompleted) {
            this.snapshotCompleted = snapshotCompleted;
            return this;
        }

        public Builder transactionContext(TransactionContext transactionContext) {
            this.transactionContext = transactionContext;
            return this;
        }

        public Builder incrementalSnapshotContext(IncrementalSnapshotContext<TableId> incrementalSnapshotContext) {
            this.incrementalSnapshotContext = incrementalSnapshotContext;
            return this;
        }

        public Builder snapshotPendingTransactions(Map<String, Scn> snapshotPendingTransactions) {
            this.snapshotPendingTransactions = snapshotPendingTransactions;
            return this;
        }

        public Builder snapshotScn(Scn scn) {
            this.snapshotScn = scn;
            return this;
        }

        // package-private on purpose: callers go through OracleOffsetContext.create()...build()
        OracleOffsetContext build() {
            return new OracleOffsetContext(connectorConfig, scn, lcrPosition, snapshotScn, snapshotPendingTransactions,
                    snapshot, snapshotCompleted, transactionContext, incrementalSnapshotContext);
        }
    }

    public static Builder create() {
        return new Builder();
    }

    /**
     * Serializes this context into the flat map persisted by Kafka Connect. Snapshot
     * offsets carry the snapshot markers; streaming offsets carry either the LCR position
     * (XStream) or the SCN / commit SCN pair (LogMiner), plus transaction and incremental
     * snapshot state.
     */
    @Override
    public Map<String, ?> getOffset() {
        if (sourceInfo.isSnapshot()) {
            Map<String, Object> offset = new HashMap<>();

            final Scn scn = sourceInfo.getScn();
            offset.put(SourceInfo.SCN_KEY, scn != null ? scn.toString() : null);
            offset.put(SourceInfo.SNAPSHOT_KEY, true);
            offset.put(SNAPSHOT_COMPLETED_KEY, snapshotCompleted);

            putSnapshotBookkeeping(offset);
            return offset;
        }
        else {
            final Map<String, Object> offset = new HashMap<>();

            if (sourceInfo.getLcrPosition() != null) {
                offset.put(SourceInfo.LCR_POSITION_KEY, sourceInfo.getLcrPosition());
            }
            else {
                final Scn scn = sourceInfo.getScn();
                final Scn commitScn = sourceInfo.getCommitScn();
                offset.put(SourceInfo.SCN_KEY, scn != null ? scn.toString() : null);
                offset.put(SourceInfo.COMMIT_SCN_KEY, commitScn != null ? commitScn.toString() : null);
            }

            putSnapshotBookkeeping(offset);
            return incrementalSnapshotContext.store(transactionContext.store(offset));
        }
    }

    /**
     * Adds the snapshot bookkeeping entries shared by both offset flavors: the pending
     * transactions encoded as "txid:startScn,txid:startScn,..." (the format expected by
     * {@link #loadSnapshotPendingTransactions(Map)}) and the snapshot SCN.
     */
    private void putSnapshotBookkeeping(Map<String, Object> offset) {
        if (snapshotPendingTransactions != null) {
            String encoded = snapshotPendingTransactions.entrySet().stream()
                    .map(e -> e.getKey() + ":" + e.getValue().toString())
                    .collect(Collectors.joining(","));
            offset.put(SNAPSHOT_PENDING_TRANSACTIONS_KEY, encoded);
        }
        offset.put(SNAPSHOT_SCN_KEY, snapshotScn != null ? snapshotScn.toString() : null);
    }

    @Override
    public Schema getSourceInfoSchema() {
        return sourceInfoSchema;
    }

    @Override
    public Struct getSourceInfo() {
        return sourceInfo.struct();
    }

    public void setScn(Scn scn) {
        sourceInfo.setScn(scn);
    }

    public void setCommitScn(Scn commitScn) {
        sourceInfo.setCommitScn(commitScn);
    }

    public Scn getScn() {
        return sourceInfo.getScn();
    }

    public Scn getCommitScn() {
        return sourceInfo.getCommitScn();
    }

    public void setLcrPosition(String lcrPosition) {
        sourceInfo.setLcrPosition(lcrPosition);
    }

    public String getLcrPosition() {
        return sourceInfo.getLcrPosition();
    }

    public Scn getSnapshotScn() {
        return snapshotScn;
    }

    public Map<String, Scn> getSnapshotPendingTransactions() {
        return snapshotPendingTransactions;
    }

    public void setSnapshotPendingTransactions(Map<String, Scn> snapshotPendingTransactions) {
        this.snapshotPendingTransactions = snapshotPendingTransactions;
    }

    public void setTransactionId(String transactionId) {
        sourceInfo.setTransactionId(transactionId);
    }

    public void setSourceTime(Instant instant) {
        sourceInfo.setSourceTime(instant);
    }

    public void setTableId(TableId tableId) {
        sourceInfo.tableEvent(tableId);
    }

    @Override
    public boolean isSnapshotRunning() {
        return sourceInfo.isSnapshot() && !snapshotCompleted;
    }

    @Override
    public void preSnapshotStart() {
        sourceInfo.setSnapshot(SnapshotRecord.TRUE);
        snapshotCompleted = false;
    }

    @Override
    public void preSnapshotCompletion() {
        snapshotCompleted = true;
    }

    @Override
    public void postSnapshotCompletion() {
        sourceInfo.setSnapshot(SnapshotRecord.FALSE);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("OracleOffsetContext [scn=").append(getScn());

        if (sourceInfo.isSnapshot()) {
            sb.append(", snapshot=").append(sourceInfo.isSnapshot());
            sb.append(", snapshot_completed=").append(snapshotCompleted);
        }

        sb.append("]");
        return sb.toString();
    }

    @Override
    public void markLastSnapshotRecord() {
        sourceInfo.setSnapshot(SnapshotRecord.LAST);
    }

    @Override
    public void event(DataCollectionId tableId, Instant timestamp) {
        sourceInfo.tableEvent((TableId) tableId);
        sourceInfo.setSourceTime(timestamp);
    }

    public void tableEvent(TableId tableId, Instant timestamp) {
        sourceInfo.setSourceTime(timestamp);
        sourceInfo.tableEvent(tableId);
    }

    public void tableEvent(Set<TableId> tableIds, Instant timestamp) {
        sourceInfo.setSourceTime(timestamp);
        sourceInfo.tableEvent(tableIds);
    }

    @Override
    public TransactionContext getTransactionContext() {
        return transactionContext;
    }

    @Override
    public void incrementalSnapshotEvents() {
        sourceInfo.setSnapshot(SnapshotRecord.INCREMENTAL);
    }

    @Override
    public IncrementalSnapshotContext<?> getIncrementalSnapshotContext() {
        return incrementalSnapshotContext;
    }

    /**
     * Helper method to resolve a {@link Scn} by key from the offset map.
     *
     * @param offset the offset map
     * @param key the entry key, either {@link SourceInfo#SCN_KEY} or {@link SourceInfo#COMMIT_SCN_KEY}.
     * @return the {@link Scn} or null if not found
     */
    public static Scn getScnFromOffsetMapByKey(Map<String, ?> offset, String key) {
        Object scn = offset.get(key);
        if (scn instanceof String) {
            return Scn.valueOf((String) scn);
        }
        else if (scn != null) {
            // offsets restored from older connector versions stored the SCN as a number
            return Scn.valueOf((Long) scn);
        }
        return null;
    }

    /**
     * Helper method to read the in-progress transaction map from the offset map.
     * The stored format is "txid:startScn,txid:startScn,...".
     *
     * @param offset the offset map
     * @return the in-progress transaction map
     */
    public static Map<String, Scn> loadSnapshotPendingTransactions(Map<String, ?> offset) {
        Map<String, Scn> snapshotPendingTransactions = new HashMap<>();
        String encoded = (String) offset.get(SNAPSHOT_PENDING_TRANSACTIONS_KEY);
        if (encoded != null) {
            Arrays.stream(encoded.split(","))
                    .map(String::trim)
                    .filter(s -> !s.isEmpty())
                    .forEach(e -> {
                        String[] parts = e.split(":", 2);
                        String txid = parts[0];
                        Scn startScn = Scn.valueOf(parts[1]);
                        snapshotPendingTransactions.put(txid, startScn);
                    });
        }
        return snapshotPendingTransactions;
    }

    /**
     * Helper method to read the snapshot SCN from the offset map.
     *
     * @param offset the offset map
     * @return the snapshot SCN
     */
    public static Scn loadSnapshotScn(Map<String, ?> offset) {
        return getScnFromOffsetMapByKey(offset, SNAPSHOT_SCN_KEY);
    }
}
debezium-connector-oracle/src/main/java/io/debezium/connector/oracle/OracleOffsetContext.java
/* * Copyright Debezium Authors. * * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package io.debezium.connector.oracle; import java.time.Instant; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.Struct; import io.debezium.connector.SnapshotRecord; import io.debezium.pipeline.source.snapshot.incremental.IncrementalSnapshotContext; import io.debezium.pipeline.spi.OffsetContext; import io.debezium.pipeline.txmetadata.TransactionContext; import io.debezium.relational.TableId; import io.debezium.schema.DataCollectionId; public class OracleOffsetContext implements OffsetContext { public static final String SNAPSHOT_COMPLETED_KEY = "snapshot_completed"; public static final String SNAPSHOT_PENDING_TRANSACTIONS_KEY = "snapshot_pending_tx"; public static final String SNAPSHOT_SCN_KEY = "snapshot_scn"; private final Schema sourceInfoSchema; private final SourceInfo sourceInfo; private final TransactionContext transactionContext; private final IncrementalSnapshotContext<TableId> incrementalSnapshotContext; /** * SCN that was used for the initial consistent snapshot. * * We keep track of this field because it's a cutoff for emitting DDL statements, * in case we start mining _before_ the snapshot SCN to cover transactions that were * ongoing at the time the snapshot was taken. */ private final Scn snapshotScn; /** * Map of (txid, start SCN) for all transactions in progress at the time the * snapshot was taken. */ private Map<String, Scn> snapshotPendingTransactions; /** * Whether a snapshot has been completed or not. 
*/ private boolean snapshotCompleted; public OracleOffsetContext(OracleConnectorConfig connectorConfig, Scn scn, Scn commitScn, String lcrPosition, Scn snapshotScn, Map<String, Scn> snapshotPendingTransactions, boolean snapshot, boolean snapshotCompleted, TransactionContext transactionContext, IncrementalSnapshotContext<TableId> incrementalSnapshotContext) { this(connectorConfig, scn, lcrPosition, snapshotScn, snapshotPendingTransactions, snapshot, snapshotCompleted, transactionContext, incrementalSnapshotContext); sourceInfo.setCommitScn(commitScn); } public OracleOffsetContext(OracleConnectorConfig connectorConfig, Scn scn, String lcrPosition, Scn snapshotScn, Map<String, Scn> snapshotPendingTransactions, boolean snapshot, boolean snapshotCompleted, TransactionContext transactionContext, IncrementalSnapshotContext<TableId> incrementalSnapshotContext) { sourceInfo = new SourceInfo(connectorConfig); sourceInfo.setScn(scn); sourceInfo.setLcrPosition(lcrPosition); sourceInfoSchema = sourceInfo.schema(); this.snapshotScn = snapshotScn; this.snapshotPendingTransactions = snapshotPendingTransactions; this.transactionContext = transactionContext; this.incrementalSnapshotContext = incrementalSnapshotContext; this.snapshotCompleted = snapshotCompleted; if (this.snapshotCompleted) { postSnapshotCompletion(); } else { sourceInfo.setSnapshot(snapshot ? 
SnapshotRecord.TRUE : SnapshotRecord.FALSE); } } public static class Builder { private OracleConnectorConfig connectorConfig; private Scn scn; private String lcrPosition; private boolean snapshot; private boolean snapshotCompleted; private TransactionContext transactionContext; private IncrementalSnapshotContext<TableId> incrementalSnapshotContext; private Map<String, Scn> snapshotPendingTransactions; private Scn snapshotScn; public Builder logicalName(OracleConnectorConfig connectorConfig) { this.connectorConfig = connectorConfig; return this; } public Builder scn(Scn scn) { this.scn = scn; return this; } public Builder lcrPosition(String lcrPosition) { this.lcrPosition = lcrPosition; return this; } public Builder snapshot(boolean snapshot) { this.snapshot = snapshot; return this; } public Builder snapshotCompleted(boolean snapshotCompleted) { this.snapshotCompleted = snapshotCompleted; return this; } public Builder transactionContext(TransactionContext transactionContext) { this.transactionContext = transactionContext; return this; } public Builder incrementalSnapshotContext(IncrementalSnapshotContext<TableId> incrementalSnapshotContext) { this.incrementalSnapshotContext = incrementalSnapshotContext; return this; } public Builder snapshotPendingTransactions(Map<String, Scn> snapshotPendingTransactions) { this.snapshotPendingTransactions = snapshotPendingTransactions; return this; } public Builder snapshotScn(Scn scn) { this.snapshotScn = scn; return this; } OracleOffsetContext build() { return new OracleOffsetContext(connectorConfig, scn, lcrPosition, snapshotScn, snapshotPendingTransactions, snapshot, snapshotCompleted, transactionContext, incrementalSnapshotContext); } } public static Builder create() { return new Builder(); } @Override public Map<String, ?> getOffset() { if (sourceInfo.isSnapshot()) { Map<String, Object> offset = new HashMap<>(); final Scn scn = sourceInfo.getScn(); offset.put(SourceInfo.SCN_KEY, scn != null ? 
scn.toString() : scn); offset.put(SourceInfo.SNAPSHOT_KEY, true); offset.put(SNAPSHOT_COMPLETED_KEY, snapshotCompleted); if (snapshotPendingTransactions != null) { String encoded = snapshotPendingTransactions.entrySet().stream() .map(e -> e.getKey() + ":" + e.getValue().toString()) .collect(Collectors.joining(",")); offset.put(SNAPSHOT_PENDING_TRANSACTIONS_KEY, encoded); } offset.put(SNAPSHOT_SCN_KEY, snapshotScn != null ? snapshotScn.toString() : null); return offset; } else { final Map<String, Object> offset = new HashMap<>(); if (sourceInfo.getLcrPosition() != null) { offset.put(SourceInfo.LCR_POSITION_KEY, sourceInfo.getLcrPosition()); } else { final Scn scn = sourceInfo.getScn(); final Scn commitScn = sourceInfo.getCommitScn(); offset.put(SourceInfo.SCN_KEY, scn != null ? scn.toString() : null); offset.put(SourceInfo.COMMIT_SCN_KEY, commitScn != null ? commitScn.toString() : null); } if (snapshotPendingTransactions != null) { String encoded = snapshotPendingTransactions.entrySet().stream() .map(e -> e.getKey() + ":" + e.getValue().toString()) .collect(Collectors.joining(",")); offset.put(SNAPSHOT_PENDING_TRANSACTIONS_KEY, encoded); } offset.put(SNAPSHOT_SCN_KEY, snapshotScn != null ? 
snapshotScn.toString() : null); return incrementalSnapshotContext.store(transactionContext.store(offset)); } } @Override public Schema getSourceInfoSchema() { return sourceInfoSchema; } @Override public Struct getSourceInfo() { return sourceInfo.struct(); } public void setScn(Scn scn) { sourceInfo.setScn(scn); } public void setCommitScn(Scn commitScn) { sourceInfo.setCommitScn(commitScn); } public Scn getScn() { return sourceInfo.getScn(); } public Scn getCommitScn() { return sourceInfo.getCommitScn(); } public void setLcrPosition(String lcrPosition) { sourceInfo.setLcrPosition(lcrPosition); } public String getLcrPosition() { return sourceInfo.getLcrPosition(); } public Scn getSnapshotScn() { return snapshotScn; } public Map<String, Scn> getSnapshotPendingTransactions() { return snapshotPendingTransactions; } public void setSnapshotPendingTransactions(Map<String, Scn> snapshotPendingTransactions) { this.snapshotPendingTransactions = snapshotPendingTransactions; } public void setTransactionId(String transactionId) { sourceInfo.setTransactionId(transactionId); } public void setSourceTime(Instant instant) { sourceInfo.setSourceTime(instant); } public void setTableId(TableId tableId) { sourceInfo.tableEvent(tableId); } @Override public boolean isSnapshotRunning() { return sourceInfo.isSnapshot() && !snapshotCompleted; } @Override public void preSnapshotStart() { sourceInfo.setSnapshot(SnapshotRecord.TRUE); snapshotCompleted = false; } @Override public void preSnapshotCompletion() { snapshotCompleted = true; } @Override public void postSnapshotCompletion() { sourceInfo.setSnapshot(SnapshotRecord.FALSE); } @Override public String toString() { StringBuilder sb = new StringBuilder("OracleOffsetContext [scn=").append(getScn()); if (sourceInfo.isSnapshot()) { sb.append(", snapshot=").append(sourceInfo.isSnapshot()); sb.append(", snapshot_completed=").append(snapshotCompleted); } sb.append("]"); return sb.toString(); } @Override public void markLastSnapshotRecord() { 
sourceInfo.setSnapshot(SnapshotRecord.LAST); } @Override public void event(DataCollectionId tableId, Instant timestamp) { sourceInfo.tableEvent((TableId) tableId); sourceInfo.setSourceTime(timestamp); } public void tableEvent(TableId tableId, Instant timestamp) { sourceInfo.setSourceTime(timestamp); sourceInfo.tableEvent(tableId); } public void tableEvent(Set<TableId> tableIds, Instant timestamp) { sourceInfo.setSourceTime(timestamp); sourceInfo.tableEvent(tableIds); } @Override public TransactionContext getTransactionContext() { return transactionContext; } @Override public void incrementalSnapshotEvents() { sourceInfo.setSnapshot(SnapshotRecord.INCREMENTAL); } @Override public IncrementalSnapshotContext<?> getIncrementalSnapshotContext() { return incrementalSnapshotContext; } /** * Helper method to resolve a {@link Scn} by key from the offset map. * * @param offset the offset map * @param key the entry key, either {@link SourceInfo#SCN_KEY} or {@link SourceInfo#COMMIT_SCN_KEY}. * @return the {@link Scn} or null if not found */ public static Scn getScnFromOffsetMapByKey(Map<String, ?> offset, String key) { Object scn = offset.get(key); if (scn instanceof String) { return Scn.valueOf((String) scn); } else if (scn != null) { return Scn.valueOf((Long) scn); } return null; } /** * Helper method to read the in-progress transaction map from the offset map. 
* * @param offset the offset map * @return the in-progress transaction map */ public static Map<String, Scn> loadSnapshotPendingTransactions(Map<String, ?> offset) { Map<String, Scn> snapshotPendingTransactions = new HashMap<>(); String encoded = (String) offset.get(SNAPSHOT_PENDING_TRANSACTIONS_KEY); if (encoded != null) { Arrays.stream(encoded.split(",")) .map(String::trim) .filter(s -> !s.isEmpty()) .forEach(e -> { String[] parts = e.split(":", 2); String txid = parts[0]; Scn startScn = Scn.valueOf(parts[1]); snapshotPendingTransactions.put(txid, startScn); }); } return snapshotPendingTransactions; } /** * Helper method to read the snapshot SCN from the offset map. * * @param offset the offset map * @return the snapshot SCN */ public static Scn loadSnapshotScn(Map<String, ?> offset) { return getScnFromOffsetMapByKey(offset, SNAPSHOT_SCN_KEY); } }
DBZ-4635 Prevent NPE from offsets during Oracle connector upgrade
debezium-connector-oracle/src/main/java/io/debezium/connector/oracle/OracleOffsetContext.java
DBZ-4635 Prevent NPE from offsets during Oracle connector upgrade
Java
apache-2.0
d3a85095ebf9e91a33c3c346ba05af1e8ae60a6e
0
rundeck/rundeck-cli,rundeck/rundeck-cli
/*
 * Copyright 2017 Rundeck, Inc. (http://rundeck.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.rundeck.client.tool;

import org.rundeck.client.RundeckClient;
import org.rundeck.client.api.RequestFailed;
import org.rundeck.client.api.RundeckApi;
import org.rundeck.client.api.model.DateInfo;
import org.rundeck.client.api.model.Execution;
import org.rundeck.client.api.model.JobItem;
import org.rundeck.client.api.model.scheduler.ScheduledJobItem;
import org.rundeck.client.tool.commands.*;
import org.rundeck.client.tool.extension.RdCommandExtension;
import org.rundeck.client.tool.util.AdaptedToolbeltOutput;
import org.rundeck.client.tool.util.ExtensionLoaderUtil;
import org.rundeck.client.util.*;
import org.rundeck.toolbelt.*;
import org.rundeck.toolbelt.format.json.jackson.JsonFormatter;
import org.rundeck.toolbelt.format.yaml.snakeyaml.YamlFormatter;
import org.rundeck.toolbelt.input.jewelcli.JewelInput;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.representer.Representer;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.*;
import java.util.function.Function;

import static org.rundeck.client.RundeckClient.ENV_INSECURE_SSL;
import static org.rundeck.client.RundeckClient.ENV_INSECURE_SSL_NO_WARN;

/**
 * Entrypoint for commandline.
 *
 * Builds the ToolBelt command tree ({@link #tool(Rd)}), configures output formatting
 * (nice/JSON/YAML, selected via {@code RD_FORMAT}) and ANSI color, and constructs the
 * authenticated Rundeck API client ({@link #createClient}) from environment variables
 * ({@code RD_URL}, {@code RD_TOKEN} or {@code RD_USER}/{@code RD_PASSWORD}) with an
 * optional interactive console prompt fallback.
 */
public class Main {

    // Environment variable names read via the ConfigSource abstraction.
    public static final String ENV_USER = "RD_USER";
    public static final String ENV_PASSWORD = "RD_PASSWORD";
    public static final String ENV_TOKEN = "RD_TOKEN";
    public static final String ENV_URL = "RD_URL";
    public static final String ENV_API_VERSION = "RD_API_VERSION";
    public static final String ENV_AUTH_PROMPT = "RD_AUTH_PROMPT";
    public static final String ENV_DEBUG = "RD_DEBUG";
    public static final String ENV_RD_FORMAT = "RD_FORMAT";
    public static final String USER_AGENT = RundeckClient.Builder.getUserAgent("rd-cli-tool/" + org.rundeck.client.Version.VERSION);

    /**
     * CLI entry point: runs the tool and exits with status 2 on failure.
     * API request failures are reported (with a stack trace when RD_DEBUG > 0)
     * instead of propagating.
     */
    public static void main(String[] args) throws CommandRunFailure {
        Rd rd = new Rd(new Env());
        Tool tool = tool(rd);

        boolean success = false;
        try {
            success = tool.runMain(args, false);
        }
        catch (RequestFailed failure) {
            rd.getOutput().error(failure.getMessage());
            if (rd.getDebugLevel() > 0) {
                StringWriter sb = new StringWriter();
                failure.printStackTrace(new PrintWriter(sb));
                rd.getOutput().error(sb.toString());
            }
        }
        if (!success) {
            System.exit(2);
        }
    }

    /**
     * Selects the output format from RD_FORMAT: "yaml", "json", or the default
     * "nice" formatter. An unknown non-null value produces a warning and falls
     * back to the nice formatter.
     */
    private static void setupFormat(final ToolBelt belt, RdClientConfig config) {
        final String format = config.get(ENV_RD_FORMAT);
        if ("yaml".equalsIgnoreCase(format)) {
            configYamlFormat(belt, config);
        }
        else if ("json".equalsIgnoreCase(format)) {
            configJsonFormat(belt);
        }
        else {
            if (null != format) {
                belt.finalOutput().warning(String.format("# WARNING: Unknown value for %s: %s", ENV_RD_FORMAT, format));
            }
            configNiceFormat(belt);
        }
    }

    /**
     * Installs the human-readable formatter: DataOutput values are rendered as
     * their map/list form when available, and info-channel lines are prefixed
     * with "# ".
     */
    private static void configNiceFormat(final ToolBelt belt) {
        NiceFormatter formatter = new NiceFormatter(null) {
            @Override
            public String format(final Object o) {
                // Prefer the structured map/list representation of DataOutput values.
                if (o instanceof DataOutput) {
                    DataOutput o1 = (DataOutput) o;
                    Map<?, ?> map = o1.asMap();
                    if (null != map) {
                        return super.format(map);
                    }
                    List<?> objects = o1.asList();
                    if (null != objects) {
                        return super.format(objects);
                    }
                }
                return super.format(o);
            }
        };
        formatter.setCollectionIndicator("");
        belt.formatter(formatter);
        belt.channels().info(new FormattedOutput(
                belt.defaultOutput(),
                new PrefixFormatter("# ", belt.defaultBaseFormatter())
        ));
    }

    /**
     * Installs the JSON formatter and silences info/warning/error channels so
     * stdout carries only machine-readable output.
     */
    private static void configJsonFormat(final ToolBelt belt) {
        belt.formatter(new JsonFormatter(DataOutputAsFormatable));
        belt.channels().infoEnabled(false);
        belt.channels().warningEnabled(false);
        belt.channels().errorEnabled(false);
    }

    /**
     * Installs the YAML formatter. Flow style is controlled by RD_YAML_FLOW
     * (default BLOCK) and pretty flow by RD_YAML_PRETTY (default true); several
     * API model classes are tagged as plain maps so no Java type tags are emitted.
     * Info/warning/error channels are silenced as for JSON.
     */
    private static void configYamlFormat(final ToolBelt belt, final RdClientConfig config) {
        DumperOptions dumperOptions = new DumperOptions();
        dumperOptions.setDefaultFlowStyle(
                "BLOCK".equalsIgnoreCase(config.getString("RD_YAML_FLOW", "BLOCK")) ?
                DumperOptions.FlowStyle.BLOCK :
                DumperOptions.FlowStyle.FLOW
        );
        dumperOptions.setPrettyFlow(config.getBool("RD_YAML_PRETTY", true));
        Representer representer = new Representer();
        representer.addClassTag(JobItem.class, Tag.MAP);
        representer.addClassTag(ScheduledJobItem.class, Tag.MAP);
        representer.addClassTag(DateInfo.class, Tag.MAP);
        representer.addClassTag(Execution.class, Tag.MAP);
        belt.formatter(new YamlFormatter(DataOutputAsFormatable, new Yaml(representer, dumperOptions)));
        belt.channels().infoEnabled(false);
        belt.channels().warningEnabled(false);
        belt.channels().errorEnabled(false);
    }

    // Adapts a DataOutput value to the Formatable interface expected by the
    // JSON/YAML formatters; other values yield Optional.empty().
    private static final Function<Object, Optional<Formatable>> DataOutputAsFormatable = o -> {
        if (o instanceof DataOutput) {
            return Optional.of(new Formatable() {
                @Override
                public List<?> asList() {
                    return ((DataOutput) o).asList();
                }

                @Override
                public Map<?, ?> asMap() {
                    return ((DataOutput) o).asMap();
                }
            });
        }
        return Optional.empty();
    };

    /**
     * Builds the full "rd" command tool: registers the built-in subcommands plus
     * any discovered {@link RdCommandExtension}s, configures color/format/output,
     * warns when insecure SSL is enabled, and installs a friendly handler for
     * {@link InputError}.
     */
    public static Tool tool(final Rd rd) {
        List<Object> base = new ArrayList<>(Arrays.asList(
                new Adhoc(rd),
                new Jobs(rd),
                new Projects(rd),
                new Executions(rd),
                new Run(rd),
                new Keys(rd),
                new RDSystem(rd),
                new Scheduler(rd),
                new Tokens(rd),
                new Nodes(rd),
                new Users(rd),
                new Something(),
                new Retry(rd),
                new Metrics(rd),
                new Version()
        ));
        AppCommand commandTool = new AppCommand(rd);
        // Discovered extensions are wired to the shared AppCommand before registration.
        List<RdCommandExtension> extensions = ExtensionLoaderUtil.list();
        extensions.forEach(ext -> ext.setRdTool(commandTool));
        base.addAll(extensions);
        ToolBelt belt = ToolBelt.belt("rd")
                                .defaultHelpCommands()
                                .ansiColorOutput(rd.isAnsiEnabled())
                                .add(base.toArray())
                                .bannerResource("rd-banner.txt")
                                .commandInput(new JewelInput());
        belt.printStackTrace(rd.getDebugLevel() > 0);
        setupColor(belt, rd);
        setupFormat(belt, rd);

        // System property takes precedence; the env var is only the default value.
        boolean insecureSsl = Boolean.parseBoolean(System.getProperty(
                "rundeck.client.insecure.ssl",
                System.getenv(ENV_INSECURE_SSL)
        ));
        boolean insecureSslNoWarn = Boolean.parseBoolean(System.getenv(ENV_INSECURE_SSL_NO_WARN));
        if (insecureSsl && !insecureSslNoWarn) {
            belt.finalOutput().warning(
                    "# WARNING: RD_INSECURE_SSL=true, no hostname or certificate trust verification will be performed");
        }
        belt.handles(InputError.class, (err, context) -> {
            context.getOutput().warning(String.format(
                    "Input error for [%s]: %s",
                    context.getCommandsString(),
                    err.getMessage()
            ));
            context.getOutput().warning(String.format(
                    "You can use: \"%s %s\" to get help.",
                    context.getCommandsString(),
                    "-h"
            ));
            return true;
        });
        rd.setOutput(new AdaptedToolbeltOutput(belt.finalOutput()));
        if (rd.getDebugLevel() > 0) {
            extensions.forEach(ext -> {
                rd.getOutput().warning("# Including extension: " + ext.getClass().getName());
            });
        }
        return belt.buckle();
    }

    /**
     * Application state: wraps the environment-backed config source and lazily
     * creates/caches the Rundeck API client.
     */
    static class Rd extends ExtConfigSource implements RdApp, RdClientConfig {
        Client<RundeckApi> client;
        private CommandOutput output;

        public Rd(final ConfigSource src) {
            super(src);
        }

        /**
         * ANSI color is on when RD_COLOR=1, or when TERM contains "color" and
         * RD_COLOR is not explicitly "0".
         */
        public boolean isAnsiEnabled() {
            String term = getString("TERM", null);
            String rd_color = getString("RD_COLOR", null);

            return "1".equals(rd_color) || (
                    term != null
                    && term.contains("color")
                    && !"0".equals(rd_color)
            );
        }

        @Override
        public int getDebugLevel() {
            return getInt(ENV_DEBUG, 0);
        }

        public String getDateFormat() {
            return getString("RD_DATE_FORMAT", "yyyy-MM-dd'T'HH:mm:ssXX");
        }

        /** Lazily creates and caches the default-version client. */
        @Override
        public Client<RundeckApi> getClient() throws InputError {
            if (null == client) {
                try {
                    client = Main.createClient(this);
                }
                catch (ConfigSourceError configSourceError) {
                    throw new InputError(configSourceError.getMessage());
                }
            }
            return client;
        }

        /** Creates a client pinned to the given API version; replaces the cached client. */
        @Override
        public Client<RundeckApi> getClient(final int version) throws InputError {
            try {
                client = Main.createClient(this, version);
            }
            catch (ConfigSourceError configSourceError) {
                throw new InputError(configSourceError.getMessage());
            }
            return client;
        }

        @Override
        public <T> ServiceClient<T> getClient(final Class<T> api, final int version) throws InputError {
            try {
                return Main.createClient(this, api, version);
            }
            catch (ConfigSourceError configSourceError) {
                throw new InputError(configSourceError.getMessage());
            }
        }

        @Override
        public <T> ServiceClient<T> getClient(final Class<T> api) throws InputError {
            try {
                return Main.createClient(this, api, null);
            }
            catch (ConfigSourceError configSourceError) {
                throw new InputError(configSourceError.getMessage());
            }
        }

        @Override
        public RdClientConfig getAppConfig() {
            return this;
        }

        /** Emits warnings when the server forced a lower API version than requested. */
        public void versionDowngradeWarning(int requested, int supported) {
            getOutput().warning(String.format(
                    "# WARNING: API Version Downgraded: %d -> %d",
                    requested,
                    supported
            ));
            getOutput().warning(String.format(
                    "# WARNING: To avoid this warning, specify the API version via RD_URL: "
                    + "export RD_URL=%sapi/%s",
                    client.getAppBaseUrl(),
                    supported
            ));
            getOutput().warning("# WARNING: To disable downgrading: "
                                + "export RD_API_DOWNGRADE=false");
        }

        public CommandOutput getOutput() {
            return output;
        }

        public void setOutput(CommandOutput output) {
            this.output = output;
        }
    }

    /**
     * Applies per-channel ANSI color overrides (RD_COLOR_INFO/OUTPUT/WARN/ERROR)
     * when ANSI output is enabled.
     */
    private static void setupColor(final ToolBelt belt, RdClientConfig config) {
        if (config.isAnsiEnabled()) {
            String info = config.get("RD_COLOR_INFO");
            if (null != info) {
                belt.ansiColor().info(info);
            }
            String output = config.get("RD_COLOR_OUTPUT");
            if (null != output) {
                belt.ansiColor().output(output);
            }
            String warn = config.get("RD_COLOR_WARN");
            if (null != warn) {
                belt.ansiColor().warning(warn);
            }
            String error = config.get("RD_COLOR_ERROR");
            if (null != error) {
                belt.ansiColor().error(error);
            }
        }
    }

    public static Client<RundeckApi> createClient(Rd config) throws ConfigSource.ConfigSourceError {
        return createClient(config, RundeckApi.class, null);
    }

    public static <T> Client<T> createClient(Rd config, Class<T> api) throws ConfigSource.ConfigSourceError {
        return createClient(config, api, null);
    }

    public static Client<RundeckApi> createClient(Rd config, Integer requestedVersion) throws ConfigSource.ConfigSourceError {
        return createClient(config, RundeckApi.class, requestedVersion);
    }

    /**
     * Builds an authenticated API client.
     *
     * Auth resolution order: environment config first; if incomplete and a console
     * is available (and RD_AUTH_PROMPT is not false), prompts interactively.
     * A non-blank username selects password auth, otherwise token auth.
     *
     * @param config app config (RD_URL is required)
     * @param api the retrofit API interface class
     * @param requestedVersion explicit API version, or null to use RD_API_VERSION/default
     * @throws ConfigSource.ConfigSourceError if RD_URL is missing
     * @throws IllegalArgumentException if neither token nor username/password is provided
     */
    public static <T> Client<T> createClient(Rd config, Class<T> api, Integer requestedVersion)
            throws ConfigSource.ConfigSourceError
    {
        Auth auth = new Auth() {
        };
        auth = auth.chain(new ConfigAuth(config));
        String baseUrl = config.require(
                ENV_URL,
                "Please specify the Rundeck base URL, e.g. http://host:port or http://host:port/api/14"
        );

        if (!auth.isConfigured() && config.getBool(ENV_AUTH_PROMPT, true) && null != System.console()) {
            // Memoized so credentials are only prompted for once.
            auth = auth.chain(new ConsoleAuth(String.format("Credentials for URL: %s", baseUrl)).memoize());
        }

        RundeckClient.Builder<T> builder = RundeckClient.builder(api)
                                                        .baseUrl(baseUrl)
                                                        .config(config);
        if (null != requestedVersion) {
            builder.apiVersion(requestedVersion);
        }
        else {
            int anInt = config.getInt(ENV_API_VERSION, -1);
            if (anInt > 0) {
                builder.apiVersion(anInt);
            }
        }

        if (auth.isTokenAuth()) {
            builder.tokenAuth(auth.getToken());
        }
        else {
            if (null == auth.getUsername() || "".equals(auth.getUsername().trim())) {
                throw new IllegalArgumentException("Username or token must be entered, or use environment variable " + ENV_USER + " or " + ENV_TOKEN);
            }
            if (null == auth.getPassword() || "".equals(auth.getPassword().trim())) {
                throw new IllegalArgumentException("Password must be entered, or use environment variable " + ENV_PASSWORD);
            }
            builder.passwordAuth(auth.getUsername(), auth.getPassword());
        }
        builder.logger(new OutputLogger(config.getOutput()));
        builder.userAgent("rd-cli-tool/" + org.rundeck.client.Version.VERSION);
        return builder.build();
    }

    /**
     * Credential source abstraction. Implementations supply username/password
     * and/or a token; {@link #chain(Auth)} composes sources with first-non-null
     * wins semantics.
     */
    interface Auth {
        default boolean isConfigured() {
            return null != getToken() || (
                    null != getUsername() && null != getPassword()
            );
        }

        default String getUsername() {
            return null;
        }

        default String getPassword() {
            return null;
        }

        default String getToken() {
            return null;
        }

        /** Token auth applies when no (non-blank) username is set and a token is present. */
        default boolean isTokenAuth() {
            String username = getUsername();
            if (null != username && !"".equals(username.trim())) {
                return false;
            }
            String token = getToken();
            return null != token && !"".equals(token);
        }

        default Auth chain(Auth auth) {
            return new ChainAuth(Arrays.asList(this, auth));
        }

        /** Wraps this source so each credential is only computed (e.g. prompted) once. */
        default Auth memoize() {
            return new MemoAuth(this);
        }
    }

    /** Credentials read from RD_USER/RD_PASSWORD/RD_TOKEN via the config source. */
    static class ConfigAuth implements Auth {
        final ConfigSource config;

        public ConfigAuth(final ConfigSource config) {
            this.config = config;
        }

        @Override
        public String getUsername() {
            return config.get(ENV_USER);
        }

        @Override
        public String getPassword() {
            return config.get(ENV_PASSWORD);
        }

        @Override
        public String getToken() {
            return config.get(ENV_TOKEN);
        }
    }

    /**
     * Interactive credentials read from {@link System#console()}; the header is
     * printed once before the first prompt.
     */
    static class ConsoleAuth implements Auth {
        String username;
        String pass;
        String token;
        final String header;
        boolean echoHeader;

        public ConsoleAuth(final String header) {
            this.header = header;
            echoHeader = false;
        }

        @Override
        public String getUsername() {
            echo();
            return System.console().readLine("Enter username (blank for token auth): ");
        }

        private void echo() {
            if (!echoHeader) {
                if (null != header) {
                    System.out.println(header);
                }
                echoHeader = true;
            }
        }

        @Override
        public String getPassword() {
            echo();
            char[] chars = System.console().readPassword("Enter password: ");
            return new String(chars);
        }

        @Override
        public String getToken() {
            echo();
            char[] chars = System.console().readPassword("Enter auth token: ");
            return new String(chars);
        }
    }

    /** Composite auth: returns the first non-null credential from the chain, in order. */
    static class ChainAuth implements Auth {
        final Collection<Auth> chain;

        public ChainAuth(final Collection<Auth> chain) {
            this.chain = chain;
        }

        @Override
        public String getUsername() {
            return findFirst(Auth::getUsername);
        }

        private String findFirst(Function<Auth, String> func) {
            for (Auth auth : chain) {
                String user = func.apply(auth);
                if (null != user) {
                    return user;
                }
            }
            return null;
        }

        @Override
        public String getPassword() {
            return findFirst(Auth::getPassword);
        }

        @Override
        public String getToken() {
            return findFirst(Auth::getToken);
        }
    }

    /**
     * Caching decorator: each credential is fetched from the delegate at most once
     * (including a null result), so console prompts are not repeated.
     */
    static class MemoAuth implements Auth {
        final Auth auth;

        public MemoAuth(final Auth auth) {
            this.auth = auth;
        }

        String username;
        boolean usermemo = false;
        String pass;
        boolean passmemo = false;
        String token;
        boolean tokenmemo = false;

        @Override
        public String getUsername() {
            if (usermemo) {
                return username;
            }
            username = auth.getUsername();
            usermemo = true;
            return username;
        }

        @Override
        public String getPassword() {
            if (passmemo) {
                return pass;
            }
            pass = auth.getPassword();
            passmemo = true;
            return pass;
        }

        @Override
        public String getToken() {
            if (tokenmemo) {
                return token;
            }
            token = auth.getToken();
            tokenmemo = true;
            return token;
        }
    }

    /** Hidden easter-egg "pond" command: prints a random fortune line. */
    @Hidden
    @Command("pond")
    public static class Something {
        @Command
        public void pond(org.rundeck.toolbelt.CommandOutput out) {
            int i = new Random().nextInt(4);
            ANSIColorOutput.ColorString kind;
            switch (i) {
                case 1:
                    kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.BLUE, "A little luck.");
                    break;
                case 2:
                    kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.GREEN, "Good luck.");
                    break;
                case 3:
                    kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.ORANGE, "Great luck.");
                    break;
                default:
                    kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.RESET, "Big trouble.");
                    break;
            }
            out.output("For your reference, today you will have:");
            out.output(kind);
        }
    }

    /** Routes client HTTP-layer log messages to the command output channels. */
    private static class OutputLogger implements Client.Logger {
        final CommandOutput output;

        public OutputLogger(final org.rundeck.toolbelt.CommandOutput output) {
            this.output = new AdaptedToolbeltOutput(output);
        }

        public OutputLogger(final CommandOutput output) {
            this.output = output;
        }

        @Override
        public void output(final String out) {
            output.output(out);
        }

        @Override
        public void warning(final String warn) {
            output.warning(warn);
        }

        @Override
        public void error(final String err) {
            output.error(err);
        }
    }
}
rd-cli-tool/src/main/java/org/rundeck/client/tool/Main.java
/* * Copyright 2017 Rundeck, Inc. (http://rundeck.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.rundeck.client.tool; import org.rundeck.client.RundeckClient; import org.rundeck.client.api.RequestFailed; import org.rundeck.client.api.RundeckApi; import org.rundeck.client.api.model.DateInfo; import org.rundeck.client.api.model.Execution; import org.rundeck.client.api.model.JobItem; import org.rundeck.client.api.model.scheduler.ScheduledJobItem; import org.rundeck.client.tool.commands.*; import org.rundeck.client.tool.extension.RdCommandExtension; import org.rundeck.client.tool.util.AdaptedToolbeltOutput; import org.rundeck.client.tool.util.ExtensionLoaderUtil; import org.rundeck.client.util.*; import org.rundeck.toolbelt.*; import org.rundeck.toolbelt.format.json.jackson.JsonFormatter; import org.rundeck.toolbelt.format.yaml.snakeyaml.YamlFormatter; import org.rundeck.toolbelt.input.jewelcli.JewelInput; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.Yaml; import org.yaml.snakeyaml.nodes.Tag; import org.yaml.snakeyaml.representer.Representer; import java.io.PrintWriter; import java.io.StringWriter; import java.util.*; import java.util.function.Function; import static org.rundeck.client.RundeckClient.ENV_INSECURE_SSL; import static org.rundeck.client.RundeckClient.ENV_INSECURE_SSL_NO_WARN; /** * Entrypoint for commandline */ public class Main { public static final String ENV_USER = "RD_USER"; public static final String ENV_PASSWORD = 
"RD_PASSWORD"; public static final String ENV_TOKEN = "RD_TOKEN"; public static final String ENV_URL = "RD_URL"; public static final String ENV_API_VERSION = "RD_API_VERSION"; public static final String ENV_AUTH_PROMPT = "RD_AUTH_PROMPT"; public static final String ENV_DEBUG = "RD_DEBUG"; public static final String ENV_RD_FORMAT = "RD_FORMAT"; public static final String USER_AGENT = RundeckClient.Builder.getUserAgent("rd-cli-tool/" + org.rundeck.client.Version.VERSION); public static void main(String[] args) throws CommandRunFailure { Rd rd = new Rd(new Env()); Tool tool = tool(rd); boolean success = false; try { success = tool.runMain(args, false); } catch (RequestFailed failure) { rd.getOutput().error(failure.getMessage()); if (rd.getDebugLevel() > 0) { StringWriter sb = new StringWriter(); failure.printStackTrace(new PrintWriter(sb)); rd.getOutput().error(sb.toString()); } } if (!success) { System.exit(2); } } private static void setupFormat(final ToolBelt belt, RdClientConfig config) { final String format = config.get(ENV_RD_FORMAT); if ("yaml".equalsIgnoreCase(format)) { configYamlFormat(belt, config); } else if ("json".equalsIgnoreCase(format)) { configJsonFormat(belt); } else { if (null != format) { belt.finalOutput().warning(String.format("# WARNING: Unknown value for %s: %s", ENV_RD_FORMAT, format)); } configNiceFormat(belt); } } private static void configNiceFormat(final ToolBelt belt) { NiceFormatter formatter = new NiceFormatter(null) { @Override public String format(final Object o) { if (o instanceof DataOutput) { DataOutput o1 = (DataOutput) o; Map<?, ?> map = o1.asMap(); if (null != map) { return super.format(map); } List<?> objects = o1.asList(); if (null != objects) { return super.format(objects); } } return super.format(o); } }; formatter.setCollectionIndicator(""); belt.formatter(formatter); belt.channels().info(new FormattedOutput( belt.defaultOutput(), new PrefixFormatter("# ", belt.defaultBaseFormatter()) )); } private static void 
configJsonFormat(final ToolBelt belt) { belt.formatter(new JsonFormatter(DataOutputAsFormatable)); belt.channels().infoEnabled(false); belt.channels().warningEnabled(false); belt.channels().errorEnabled(false); } private static void configYamlFormat(final ToolBelt belt, final RdClientConfig config) { DumperOptions dumperOptions = new DumperOptions(); dumperOptions.setDefaultFlowStyle( "BLOCK".equalsIgnoreCase(config.getString("RD_YAML_FLOW", "BLOCK")) ? DumperOptions.FlowStyle.BLOCK : DumperOptions.FlowStyle.FLOW ); dumperOptions.setPrettyFlow(config.getBool("RD_YAML_PRETTY", true)); Representer representer = new Representer(); representer.addClassTag(JobItem.class, Tag.MAP); representer.addClassTag(ScheduledJobItem.class, Tag.MAP); representer.addClassTag(DateInfo.class, Tag.MAP); representer.addClassTag(Execution.class, Tag.MAP); belt.formatter(new YamlFormatter(DataOutputAsFormatable, new Yaml(representer, dumperOptions))); belt.channels().infoEnabled(false); belt.channels().warningEnabled(false); belt.channels().errorEnabled(false); } private static final Function<Object, Optional<Formatable>> DataOutputAsFormatable = o -> { if (o instanceof DataOutput) { return Optional.of(new Formatable() { @Override public List<?> asList() { return ((DataOutput) o).asList(); } @Override public Map<?, ?> asMap() { return ((DataOutput) o).asMap(); } }); } return Optional.empty(); }; public static Tool tool(final Rd rd) { List<Object> base = new ArrayList<>(Arrays.asList( new Adhoc(rd), new Jobs(rd), new Projects(rd), new Executions(rd), new Run(rd), new Keys(rd), new RDSystem(rd), new Scheduler(rd), new Tokens(rd), new Nodes(rd), new Users(rd), new Something(), new Retry(rd), new Metrics(rd), new Version() )); AppCommand commandTool = new AppCommand(rd); List<RdCommandExtension> extensions = ExtensionLoaderUtil.list(); extensions.forEach(ext -> ext.setRdTool(commandTool)); base.addAll(extensions); ToolBelt belt = ToolBelt.belt("rd") .defaultHelpCommands() 
.ansiColorOutput(rd.isAnsiEnabled()) .add(base.toArray()) .bannerResource("rd-banner.txt") .commandInput(new JewelInput()); belt.printStackTrace(rd.getDebugLevel() > 0); setupColor(belt, rd); setupFormat(belt, rd); boolean insecureSsl = Boolean.parseBoolean(System.getProperty( "rundeck.client.insecure.ssl", System.getenv(ENV_INSECURE_SSL) )); boolean insecureSslNoWarn = Boolean.parseBoolean(System.getenv(ENV_INSECURE_SSL_NO_WARN)); if (insecureSsl && !insecureSslNoWarn ) { belt.finalOutput().warning( "# WARNING: RD_INSECURE_SSL=true, no hostname or certificate trust verification will be performed"); } belt.handles(InputError.class, (err, context) -> { context.getOutput().warning(String.format( "Input error for [%s]: %s", context.getCommandsString(), err.getMessage() )); context.getOutput().warning(String.format( "You can use: \"%s %s\" to get help.", context.getCommandsString(), "-h" )); return true; }); rd.setOutput(new AdaptedToolbeltOutput(belt.finalOutput())); if (rd.getDebugLevel() > 0) { extensions.forEach(ext -> { rd.getOutput().warning("# Including extension: " + ext.getClass().getName()); }); } return belt.buckle(); } static class Rd extends ExtConfigSource implements RdApp, RdClientConfig { Client<RundeckApi> client; private CommandOutput output; public Rd(final ConfigSource src) { super(src); } public boolean isAnsiEnabled() { String term = getString("TERM", null); String rd_color = getString("RD_COLOR", null); return "1".equals(rd_color) || ( term != null && term.contains("color") && !"0".equals(rd_color) ); } @Override public int getDebugLevel() { return getInt(ENV_DEBUG, 0); } public String getDateFormat() { return getString("RD_DATE_FORMAT", "yyyy-MM-dd'T'HH:mm:ssXX"); } @Override public Client<RundeckApi> getClient() throws InputError { if (null == client) { try { client = Main.createClient(this); } catch (ConfigSourceError configSourceError) { throw new InputError(configSourceError.getMessage()); } } return client; } @Override public 
Client<RundeckApi> getClient(final int version) throws InputError { try { client = Main.createClient(this, version); } catch (ConfigSourceError configSourceError) { throw new InputError(configSourceError.getMessage()); } return client; } @Override public <T> ServiceClient<T> getClient(final Class<T> api, final int version) throws InputError { try { return Main.createClient(this, api, version); } catch (ConfigSourceError configSourceError) { throw new InputError(configSourceError.getMessage()); } } @Override public <T> ServiceClient<T> getClient(final Class<T> api) throws InputError { try { return Main.createClient(this, api, null); } catch (ConfigSourceError configSourceError) { throw new InputError(configSourceError.getMessage()); } } @Override public RdClientConfig getAppConfig() { return this; } public void versionDowngradeWarning(int requested, int supported) { getOutput().warning(String.format( "# WARNING: API Version Downgraded: %d -> %d", requested, supported )); getOutput().warning(String.format( "# WARNING: To avoid this warning, specify the API version via RD_URL: " + "export RD_URL=%sapi/%s", client.getAppBaseUrl(), supported )); getOutput().warning("# WARNING: To disable downgrading: " + "export RD_API_DOWNGRADE=false"); } public CommandOutput getOutput() { return output; } public void setOutput(CommandOutput output) { this.output = output; } } private static void setupColor(final ToolBelt belt, RdClientConfig config) { if (config.isAnsiEnabled()) { String info = config.get("RD_COLOR_INFO"); if (null != info) { belt.ansiColor().info(info); } String output = config.get("RD_COLOR_OUTPUT"); if (null != output) { belt.ansiColor().output(output); } String warn = config.get("RD_COLOR_WARN"); if (null != warn) { belt.ansiColor().warning(warn); } String error = config.get("RD_COLOR_ERROR"); if (null != error) { belt.ansiColor().error(error); } } } public static Client<RundeckApi> createClient(Rd config) throws ConfigSource.ConfigSourceError { return 
createClient(config, RundeckApi.class, null); } public static <T> Client<T> createClient(Rd config, Class<T> api) throws ConfigSource.ConfigSourceError { return createClient(config, api, null); } public static Client<RundeckApi> createClient(Rd config, Integer requestedVersion) throws ConfigSource.ConfigSourceError { return createClient(config, RundeckApi.class, requestedVersion); } public static <T> Client<T> createClient(Rd config, Class<T> api, Integer requestedVersion) throws ConfigSource.ConfigSourceError { Auth auth = new Auth() { }; auth = auth.chain(new ConfigAuth(config)); String baseUrl = config.require( ENV_URL, "Please specify the Rundeck base URL, e.g. http://host:port or http://host:port/api/14" ); if (!auth.isConfigured() && config.getBool(ENV_AUTH_PROMPT, true) && null != System.console()) { auth = auth.chain(new ConsoleAuth(String.format("Credentials for URL: %s", baseUrl)).memoize()); } RundeckClient.Builder<T> builder = RundeckClient.builder(api) .baseUrl(baseUrl) .config(config); if (null != requestedVersion) { builder.apiVersion(requestedVersion); } else { int anInt = config.getInt(ENV_API_VERSION, -1); if (anInt > 0) { builder.apiVersion(anInt); } } if (auth.isTokenAuth()) { builder.tokenAuth(auth.getToken()); } else { if (null == auth.getUsername() || "".equals(auth.getUsername().trim())) { throw new IllegalArgumentException("Username or token must be entered, or use environment variable " + ENV_USER + " or " + ENV_TOKEN); } if (null == auth.getPassword() || "".equals(auth.getPassword().trim())) { throw new IllegalArgumentException("Password must be entered, or use environment variable " + ENV_PASSWORD); } builder.passwordAuth(auth.getUsername(), auth.getPassword()); } builder.logger(new OutputLogger(config.getOutput())); builder.userAgent("rd-cli-tool/" + org.rundeck.client.Version.VERSION); return builder.build(); } interface Auth { default boolean isConfigured() { return null != getToken() || ( null != getUsername() && null != 
getPassword() ); } default String getUsername() { return null; } default String getPassword() { return null; } default String getToken() { return null; } default boolean isTokenAuth() { String username = getUsername(); if (null != username && !"".equals(username.trim())) { return false; } String token = getToken(); return null != token && !"".equals(token); } default Auth chain(Auth auth) { return new ChainAuth(Arrays.asList(this, auth)); } default Auth memoize() { return new MemoAuth(this); } } static class ConfigAuth implements Auth { final ConfigSource config; public ConfigAuth(final ConfigSource config) { this.config = config; } @Override public String getUsername() { return config.get(ENV_USER); } @Override public String getPassword() { return config.get(ENV_PASSWORD); } @Override public String getToken() { return config.get(ENV_TOKEN); } } static class ConsoleAuth implements Auth { String username; String pass; String token; final String header; boolean echoHeader; public ConsoleAuth(final String header) { this.header = header; echoHeader = false; } @Override public String getUsername() { echo(); return System.console().readLine("Enter username (blank for token auth): "); } private void echo() { if (!echoHeader) { if (null != header) { System.out.println(header); } echoHeader = true; } } @Override public String getPassword() { echo(); char[] chars = System.console().readPassword("Enter password: "); return new String(chars); } @Override public String getToken() { echo(); char[] chars = System.console().readPassword("Enter auth token: "); return new String(chars); } } static class ChainAuth implements Auth { final Collection<Auth> chain; public ChainAuth(final Collection<Auth> chain) { this.chain = chain; } @Override public String getUsername() { return findFirst(Auth::getUsername); } private String findFirst(Function<Auth, String> func) { for (Auth auth : chain) { String user = func.apply(auth); if (null != user) { return user; } } return null; } @Override 
public String getPassword() { return findFirst(Auth::getPassword); } @Override public String getToken() { return findFirst(Auth::getToken); } } static class MemoAuth implements Auth { final Auth auth; public MemoAuth(final Auth auth) { this.auth = auth; } String username; boolean usermemo = false; String pass; boolean passmemo = false; String token; boolean tokenmemo = false; @Override public String getUsername() { if (usermemo) { return username; } username = auth.getUsername(); usermemo = true; return username; } @Override public String getPassword() { if (passmemo) { return pass; } pass = auth.getPassword(); passmemo = true; return pass; } @Override public String getToken() { if (tokenmemo) { return token; } token = auth.getToken(); tokenmemo = true; return token; } } @Hidden @Command("pond") public static class Something { @Command public void pond(CommandOutput out) { int i = new Random().nextInt(4); ANSIColorOutput.ColorString kind; switch (i) { case 1: kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.BLUE, "A little luck."); break; case 2: kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.GREEN, "Good luck."); break; case 3: kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.ORANGE, "Great luck."); break; default: kind = ANSIColorOutput.colorize(ANSIColorOutput.Color.RESET, "Big trouble."); break; } out.output("For your reference, today you will have:"); out.output(kind); } } private static class OutputLogger implements Client.Logger { final CommandOutput output; public OutputLogger(final org.rundeck.toolbelt.CommandOutput output) { this.output = new AdaptedToolbeltOutput(output); } public OutputLogger(final CommandOutput output) { this.output = output; } @Override public void output(final String out) { output.output(out); } @Override public void warning(final String warn) { output.warning(warn); } @Override public void error(final String err) { output.error(err); } } }
Fix output for internal command
rd-cli-tool/src/main/java/org/rundeck/client/tool/Main.java
Fix output for internal command
Java
apache-2.0
82dd4f67af3d99691d787b99743538ba7365ec79
0
phimpme/android-prototype,phimpme/android-prototype,phimpme/android-prototype,phimpme/android-prototype
package org.fossasia.phimpme.gallery.activities; import android.animation.ArgbEvaluator; import android.animation.ValueAnimator; import android.annotation.TargetApi; import android.content.ContentResolver; import android.content.ContentUris; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.res.Configuration; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Color; import android.graphics.PorterDuff; import android.media.MediaScannerConnection; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.provider.MediaStore; import android.provider.Settings; import android.speech.RecognizerIntent; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.design.widget.Snackbar; import android.support.v4.content.ContextCompat; import android.support.v4.print.PrintHelper; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatDelegate; import android.support.v7.widget.ActionMenuView; import android.support.v7.widget.CardView; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.Toolbar; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.transition.ChangeBounds; import android.util.DisplayMetrics; import android.util.Log; import android.view.Display; import android.view.Menu; import android.view.MenuItem; import android.view.Surface; import android.view.View; import android.view.WindowManager; import android.view.animation.AccelerateInterpolator; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.view.animation.DecelerateInterpolator; import 
android.widget.EditText; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import android.widget.ViewSwitcher; import com.bumptech.glide.Glide; import com.mikepenz.community_material_typeface_library.CommunityMaterial; import com.mikepenz.iconics.view.IconicsImageView; import com.yalantis.ucrop.UCrop; import org.fossasia.phimpme.R; import org.fossasia.phimpme.base.SharedMediaActivity; import org.fossasia.phimpme.base.ThemedActivity; import org.fossasia.phimpme.data.local.DatabaseHelper; import org.fossasia.phimpme.data.local.FavouriteImagesModel; import org.fossasia.phimpme.data.local.ImageDescModel; import org.fossasia.phimpme.data.local.TrashBinRealmModel; import org.fossasia.phimpme.data.local.UploadHistoryRealmModel; import org.fossasia.phimpme.editor.CompressImageActivity; import org.fossasia.phimpme.editor.EditImageActivity; import org.fossasia.phimpme.editor.FileUtils; import org.fossasia.phimpme.editor.utils.BitmapUtils; import org.fossasia.phimpme.gallery.SelectAlbumBottomSheet; import org.fossasia.phimpme.gallery.adapters.ImageAdapter; import org.fossasia.phimpme.gallery.data.Album; import org.fossasia.phimpme.gallery.data.AlbumSettings; import org.fossasia.phimpme.gallery.data.Media; import org.fossasia.phimpme.gallery.data.base.MediaDetailsMap; import org.fossasia.phimpme.gallery.util.AlertDialogsHelper; import org.fossasia.phimpme.gallery.util.ColorPalette; import org.fossasia.phimpme.gallery.util.ContentHelper; import org.fossasia.phimpme.gallery.util.Measure; import org.fossasia.phimpme.gallery.util.PreferenceUtil; import org.fossasia.phimpme.gallery.util.SecurityHelper; import org.fossasia.phimpme.gallery.util.StringUtils; import org.fossasia.phimpme.gallery.util.ThemeHelper; import org.fossasia.phimpme.gallery.views.PagerRecyclerView; import org.fossasia.phimpme.share.SharingActivity; import 
org.fossasia.phimpme.utilities.ActivitySwitchHelper; import org.fossasia.phimpme.utilities.BasicCallBack; import org.fossasia.phimpme.utilities.SnackBarHandler; import java.io.File; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import butterknife.BindView; import butterknife.ButterKnife; import io.realm.Realm; import io.realm.RealmQuery; import io.realm.RealmResults; import static org.fossasia.phimpme.gallery.activities.LFMainActivity.listAll; import static org.fossasia.phimpme.utilities.Utils.promptSpeechInput; /** * Created by dnld on 18/02/16. */ @SuppressWarnings("ResourceAsColor") public class SingleMediaActivity extends SharedMediaActivity implements ImageAdapter.OnSingleTap, ImageAdapter.enterTransition { private static int SLIDE_SHOW_INTERVAL = 5000; private static final String ISLOCKED_ARG = "isLocked"; static final String ACTION_OPEN_ALBUM = "android.intent.action.pagerAlbumMedia"; private static final String ACTION_REVIEW = "com.android.camera.action.REVIEW"; private int REQUEST_CODE_SD_CARD_PERMISSIONS = 42; private ImageAdapter adapter; private PreferenceUtil SP; private RelativeLayout ActivityBackground; private SelectAlbumBottomSheet bottomSheetDialogFragment; private SecurityHelper securityObj; private boolean fullScreenMode, customUri = false; public static final int TAKE_PHOTO_CODE = 8; public static final int ACTION_REQUEST_EDITIMAGE = 9; public static final int ACTION_REQUEST_COMPRESSIMAGE = 13; public static final int ACTION_STICKERS_IMAGE = 10; private Bitmap mainBitmap; private int imageWidth, imageHeight; private String path; private SingleMediaActivity context; public static final String EXTRA_OUTPUT = "extra_output"; public static String pathForDescription; public Boolean allPhotoMode; public Boolean favphotomode; public Boolean upoadhis; private Boolean trashdis; public int all_photo_pos; public int size_all; public int current_image_pos; private Uri uri; private Realm realm; private 
FavouriteImagesModel fav; private DatabaseHelper databaseHelper; private Handler handler; private Runnable runnable; boolean slideshow = false; private boolean details = false; private ArrayList<Media> favouriteslist; public static Media mediacompress = null; private ArrayList<Media> uploadhistory; private ArrayList<Media> trashbinlistd; ImageDescModel temp; private final int REQ_CODE_SPEECH_INPUT = 100; String voiceInput; EditText editTextDescription; private RelativeLayout relativeLayout; @Nullable @BindView(R.id.view_switcher_single_media) ViewSwitcher viewSwitcher; @Nullable @BindView(R.id.PhotoPager_Layout) View parentView; @Nullable @BindView(R.id.toolbar_bottom) ActionMenuView bottomBar; @Nullable @BindView(R.id.img) ImageView imgView; @Nullable @BindView(R.id.photos_pager) PagerRecyclerView mViewPager; @Nullable @BindView(R.id.toolbar) Toolbar toolbar; Runnable slideShowRunnable = new Runnable() { @Override public void run() { try { if (!allPhotoMode && !favphotomode) { mViewPager.scrollToPosition((getAlbum().getCurrentMediaIndex() + 1) % getAlbum().getMedia().size()); } else if (allPhotoMode && !favphotomode) { mViewPager.scrollToPosition((current_image_pos + 1) % listAll.size()); } else if (favphotomode && !allPhotoMode) { mViewPager.scrollToPosition((current_image_pos + 1) % favouriteslist.size()); } } catch (Exception e) { e.printStackTrace(); } finally { if (getAlbum().getCurrentMediaIndex() + 1 == getAlbum().getMedia().size() - 1) { handler.removeCallbacks(slideShowRunnable); slideshow = false; toggleSystemUI(); } else { handler.postDelayed(this, SLIDE_SHOW_INTERVAL); } } } }; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); supportPostponeEnterTransition(); context = this; setContentView(R.layout.activity_pager); ButterKnife.bind(this); relativeLayout = (RelativeLayout) findViewById(R.id.PhotoPager_Layout); DisplayMetrics metrics = getResources().getDisplayMetrics(); imageWidth = metrics.widthPixels; 
imageHeight = metrics.heightPixels; handler = new Handler(); runnable = new Runnable() { @Override public void run() { hideSystemUI(); } }; startHandler(); overridePendingTransition(R.anim.media_zoom_in, 0); SP = PreferenceUtil.getInstance(getApplicationContext()); securityObj = new SecurityHelper(SingleMediaActivity.this); favphotomode = getIntent().getBooleanExtra("fav_photos", false); upoadhis = getIntent().getBooleanExtra("uploadhistory", false); trashdis = getIntent().getBooleanExtra("trashbin", false); allPhotoMode = getIntent().getBooleanExtra(getString(R.string.all_photo_mode), false); all_photo_pos = getIntent().getIntExtra(getString(R.string.position), 0); size_all = getIntent().getIntExtra(getString(R.string.allMediaSize), getAlbum().getCount()); if (getIntent().hasExtra("favouriteslist")) { favouriteslist = getIntent().getParcelableArrayListExtra("favouriteslist"); } if (getIntent().hasExtra("datalist")) { uploadhistory = getIntent().getParcelableArrayListExtra("datalist"); } if (getIntent().hasExtra("trashdatalist")) { trashbinlistd = getIntent().getParcelableArrayListExtra("trashdatalist"); } String path2 = getIntent().getStringExtra("path"); pathForDescription = path2; // mViewPager.setLocked(savedInstanceState.getBoolean(ISLOCKED_ARG, false)); try { Album album; if ((getIntent().getAction().equals(Intent.ACTION_VIEW) || getIntent().getAction().equals(ACTION_REVIEW)) && getIntent().getData() != null) { String path = ContentHelper.getMediaPath(getApplicationContext(), getIntent().getData()); pathForDescription = path; File file = null; if (path != null) file = new File(path); if (file != null && file.isFile()) { //the image is stored in the storage album = new Album(getApplicationContext(), file); } else { //try to show with Uri album = new Album(getApplicationContext(), getIntent().getData()); customUri = true; } getAlbums().addAlbum(0, album); } setUpSwitcherAnimation(); initUI(); setupUI(); } catch (Exception e) { e.printStackTrace(); } } private 
    // Installs fade in/out animations on the ViewSwitcher used while loading media.
    void setUpSwitcherAnimation() {
        Animation in = AnimationUtils.loadAnimation(this, android.R.anim.fade_in);
        Animation out = AnimationUtils.loadAnimation(this, android.R.anim.fade_out);
        viewSwitcher.setInAnimation(in);
        viewSwitcher.setOutAnimation(out);
    }

    /**
     * Wires up the toolbar, bottom action bar and the horizontal pager, then configures the
     * adapter and page-change callbacks for whichever of the five viewing modes is active
     * (album / all-photos / favourites / upload history / trash bin).
     */
    private void initUI() {

        final Menu bottomMenu = bottomBar.getMenu();
        getMenuInflater().inflate(R.menu.menu_bottom_view_pager, bottomMenu);
        // Upload-history and trash-bin entries cannot be favourited, edited or compressed.
        if (upoadhis) {
            bottomMenu.findItem(R.id.action_favourites).setVisible(false);
            bottomMenu.findItem(R.id.action_edit).setVisible(false);
            bottomMenu.findItem(R.id.action_compress).setVisible(false);
        }
        if (trashdis) {
            bottomMenu.findItem(R.id.action_favourites).setVisible(false);
            bottomMenu.findItem(R.id.action_edit).setVisible(false);
            bottomMenu.findItem(R.id.action_compress).setVisible(false);
            bottomMenu.findItem(R.id.action_share).setVisible(false);
            bottomMenu.findItem(R.id.restore_action).setVisible(true);
            bottomMenu.findItem(R.id.action_details).setVisible(false);
            //bottomMenu.findItem(R.id.action_delete).setVisible(false);
        }
        if (!allPhotoMode && favphotomode) {
            // NOTE(review): index-based menu access — assumes item 5 is the one to hide; verify
            // against menu_bottom_view_pager ordering.
            bottomBar.getMenu().getItem(5).setVisible(false);
        }
        // Every bottom-bar tap cancels the auto-hide timer and is routed through
        // onOptionsItemSelected so top and bottom menus share one handler.
        for (int i = 0; i < bottomMenu.size(); i++) {
            bottomMenu.getItem(i).setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
                @Override
                public boolean onMenuItemClick(MenuItem item) {
                    stopHandler();
                    return onOptionsItemSelected(item);
                }
            });
        }

        setSupportActionBar(toolbar);
        toolbar.bringToFront();
        toolbar.setNavigationIcon(getToolbarIcon(CommunityMaterial.Icon.cmd_arrow_left));
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onBackPressed();
            }
        });

        setRecentApp(getString(R.string.app_name));
        setupSystemUI();

        final LinearLayoutManager linearLayoutManager = new LinearLayoutManager(ActivitySwitchHelper.getContext(), LinearLayoutManager.HORIZONTAL, false);
        mViewPager.setLayoutManager(linearLayoutManager);
        mViewPager.setHasFixedSize(true);
        mViewPager.setLongClickable(true);

        getWindow().getDecorView().setOnSystemUiVisibilityChangeListener
                (new View.OnSystemUiVisibilityChangeListener() {
                    @Override
                    public void onSystemUiVisibilityChange(int visibility) {
                        if ((visibility & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0)
                            showSystemUI();
                        else
                            hideSystemUI();
                    }
                });

        // A tap on the image toggles the immersive/system UI.
        BasicCallBack basicCallBack = new BasicCallBack() {
            @Override
            public void callBack(int status, Object data) {
                toggleSystemUI();
            }
        };

        // Album mode: page over the current album's media and keep the favourites icon in sync.
        if (!allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            adapter = new ImageAdapter(getAlbum().getMedia(), basicCallBack, this, this);

            getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum()
                    .getMedia().size());
            current_image_pos = all_photo_pos;
            // toolbar.setTitle((mViewPager.getCurrentItem() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
            if (bottomMenu.findItem(R.id.action_favourites).getIcon().getColorFilter() == null) {
                if (!favsearch(getAlbum().getMedia(current_image_pos).getPath())) {
                    bottomMenu.findItem(R.id.action_favourites).getIcon().clearColorFilter();
                } else {
                    bottomMenu.findItem(R.id.action_favourites).getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN);
                }
            }
            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    getAlbum().setCurrentPhotoIndex(position);
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
                    invalidateOptionsMenu();
                    // Tint the favourites icon if the newly shown photo is already favourited.
                    if (!favsearch(getAlbum().getMedia(position).getPath())) {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().clearColorFilter();
                    } else {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN);
                    }
                    pathForDescription = getAlbum().getMedia().get(position).getPath();
                }
            });

            mViewPager.scrollToPosition(getAlbum().getCurrentMediaIndex());
        }
        // All-photos mode: page over the global media list from LFMainActivity.
        else if (allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            adapter = new ImageAdapter(LFMainActivity.listAll, basicCallBack, this, this);

            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + getString(R.string.of) + " " + size_all);
            current_image_pos = all_photo_pos;
            if (bottomMenu.findItem(R.id.action_favourites).getIcon().getColorFilter() == null) {
                if (!favsearch(listAll.get(current_image_pos).getPath())) {
                    bottomMenu.findItem(R.id.action_favourites).getIcon().clearColorFilter();
                } else {
                    bottomMenu.findItem(R.id.action_favourites).getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN);
                }
            }

            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    if (!favsearch(listAll.get(current_image_pos).getPath())) {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().clearColorFilter();
                    } else {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN);
                    }
                    pathForDescription = listAll.get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(all_photo_pos);
        }
        // Favourites mode: page over the favourites list passed in via the Intent.
        else if (!allPhotoMode && favphotomode && !upoadhis && !trashdis) {
            adapter = new ImageAdapter(favouriteslist, basicCallBack, this, this);
            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + getString(R.string.of) + " " + size_all);
            current_image_pos = all_photo_pos;

            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    pathForDescription = favouriteslist.get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(all_photo_pos);
        }
        // Upload-history mode: page over the uploaded-media list passed in via the Intent.
        else if (!favphotomode && !allPhotoMode && upoadhis && !trashdis) {
            adapter = new ImageAdapter(uploadhistory, basicCallBack, this, this);
            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + getString(R.string.of) + " " + size_all);
            current_image_pos = all_photo_pos;

            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    pathForDescription = uploadhistory.get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(all_photo_pos);
        }
        // Trash-bin mode: page over the trashed-media list passed in via the Intent.
        else if (trashdis && !upoadhis && !favphotomode && !allPhotoMode) {
            adapter = new ImageAdapter(trashbinlistd, basicCallBack, this, this);
            // NOTE(review): title uses the literal "of" here instead of R.string.of — not localized.
            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + "of" + " " + size_all);
            current_image_pos = all_photo_pos;
            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    pathForDescription = trashbinlistd.get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(all_photo_pos);
        }

        Display aa = ((WindowManager) getSystemService(WINDOW_SERVICE)).getDefaultDisplay();

        mViewPager.setAdapter(adapter);
        // If already rotated, force a layout pass for landscape margins.
        if (aa.getRotation() == Surface.ROTATION_90) {
            Configuration configuration = new Configuration();
            configuration.orientation = Configuration.ORIENTATION_LANDSCAPE;
            onConfigurationChanged(configuration);
        }
    }

    // Applies theme colors, brightness and orientation preferences. Called from onCreate and onResume.
    private void setupUI() {

        /**** Theme ****/
        toolbar = (Toolbar) findViewById(R.id.toolbar);
        toolbar.setBackgroundColor(
                isApplyThemeOnImgAct()
                        ?
     */
    // Schedules the auto-hide of the system UI after 5 s of inactivity.
    private void startHandler() {
        handler.postDelayed(runnable, 5000);
    }

    // Cancels a pending auto-hide.
    private void stopHandler() {
        handler.removeCallbacks(runnable);
    }

    // Any interaction restarts the 5 s auto-hide countdown.
    @Override
    public void onUserInteraction() {
        super.onUserInteraction();
        stopHandler();
        startHandler();
    }

    @Override
    public void onResume() {
        super.onResume();
        ActivitySwitchHelper.setContext(this);
        // Re-apply theme/brightness/orientation in case settings changed while paused.
        setupUI();
    }

    @Override
    protected void onStop() {
        super.onStop();
        stopHandler();
        SP.putBoolean("auto_update_media", true);
    }

    // Frees Glide caches aggressively when the system signals memory pressure.
    @Override
    public void onLowMemory() {
        super.onLowMemory();
        Glide.get(getApplicationContext()).clearMemory();
        Glide.get(getApplicationContext()).trimMemory(TRIM_MEMORY_COMPLETE);
        System.gc();
    }

    // Keep the system UI visible while an options menu is open.
    @Override
    public boolean onMenuOpened(int featureId, Menu menu) {
        if (featureId == AppCompatDelegate.FEATURE_SUPPORT_ACTION_BAR && menu != null)
            stopHandler();
        return super.onMenuOpened(featureId, menu);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_view_pager, menu);
        return true;
    }

    // Adjusts the toolbar's right margin for the landscape navigation bar and rebuilds the pager.
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                RelativeLayout.LayoutParams.MATCH_PARENT,
                RelativeLayout.LayoutParams.WRAP_CONTENT);
        if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE)
            params.setMargins(0, 0, Measure.getNavigationBarSize(SingleMediaActivity.this).x, 0);
        else
            params.setMargins(0, 0, 0, 0);
        toolbar.setLayoutParams(params);
        setUpViewPager();
    }

    // Returns true if the given path is recorded as a favourite in Realm.
    // NOTE(review): linear scan over all favourites; a Realm equalTo("path", ...) query would do.
    private boolean favsearch(String path) {
        boolean favis = false;
        realm = Realm.getDefaultInstance();
        RealmResults<FavouriteImagesModel> realmQuery = realm.where(FavouriteImagesModel.class).findAll();
        for (int i = 0; i < realmQuery.size(); i++) {
            if (realmQuery.get(i).getPath().equals(path)) {
                favis = true;
                break;
            }
        }
        return favis;
    }

    // Re-keys a stored image description from its old path to newpath (copy then delete),
    // preserving the title. Used when a photo is moved/copied to another folder.
    private void performrealmaction(final ImageDescModel descModel, String newpath) {
        realm = Realm.getDefaultInstance();
        int index = descModel.getId().lastIndexOf("/");
        String name = descModel.getId().substring(index + 1);
        String newpathy = newpath + "/" + name;
        realm.beginTransaction();
        ImageDescModel imageDescModel = realm.createObject(ImageDescModel.class, newpathy);
        imageDescModel.setTitle(descModel.getTitle());
        realm.commitTransaction();
        realm.executeTransaction(new Realm.Transaction() {
            @Override
            public void execute(Realm realm) {
                RealmResults<ImageDescModel> result = realm.where(ImageDescModel.class).equalTo
                        ("path", descModel.getId()).findAll();
                result.deleteAllFromRealm();
            }
        });
    }

    // Finds the description entry for path patjs (if any) and migrates it to newpth.
    // NOTE(review): findAll() is re-evaluated on every loop iteration — works, but is wasteful.
    private void getdescriptionpaths(String patjs, String newpth) {
        realm = Realm.getDefaultInstance();
        RealmQuery<ImageDescModel> realmQuery = realm.where(ImageDescModel.class);
        for (int i = 0; i < realmQuery.count(); i++) {
            if (realmQuery.findAll().get(i).getId().equals(patjs)) {
                performrealmaction(realmQuery.findAll().get(i), newpth);
                break;
            }
        }
    }

    // Shows/hides top-menu items depending on the active viewing mode and Uri source.
    @Override
    public boolean onPrepareOptionsMenu(final Menu menu) {
        if (allPhotoMode || favphotomode) {
            menu.findItem(R.id.action_cover).setVisible(false);
        }
        if (!allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            menu.setGroupVisible(R.id.only_photos_options, true);
        } else if (!allPhotoMode && favphotomode && !upoadhis && !trashdis) {
            menu.findItem(R.id.action_copy).setVisible(false);
            menu.findItem(R.id.rename_photo).setVisible(false);
            menu.findItem(R.id.action_move).setVisible(false);
        } else if (!allPhotoMode && !favphotomode && (upoadhis || trashdis)) {
            menu.findItem(R.id.action_copy).setVisible(false);
            menu.findItem(R.id.action_move).setVisible(false);
            menu.findItem(R.id.rename_photo).setVisible(false);
            menu.findItem(R.id.slide_show).setVisible(false);
            menu.findItem(R.id.action_use_as).setVisible(false);
            menu.findItem(R.id.action_cover).setVisible(false);
            menu.findItem(R.id.action_description).setVisible(false);
        }
        if (customUri) {
            // Viewing via a content Uri (not a storage file): storage-only actions unavailable.
            menu.setGroupVisible(R.id.on_internal_storage, false);
            menu.setGroupVisible(R.id.only_photos_options, false);
            menu.findItem(R.id.sort_action).setVisible(false);
        }
        return true;
    }

    /**
     * Handles results from: speech-to-text description input, the SD-card permission
     * document-tree picker, and the uCrop image cropper.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQ_CODE_SPEECH_INPUT && data != null) {
            ArrayList<String> result = data
                    .getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            voiceInput = result.get(0);
            // Append the recognized text to the existing description.
            editTextDescription.setText(editTextDescription.getText().toString().trim() + " " + voiceInput);
            editTextDescription.setSelection(editTextDescription.length());
            return;
        }

        if (resultCode == RESULT_OK && requestCode == REQUEST_CODE_SD_CARD_PERMISSIONS) {
            Uri treeUri = data.getData();
            // Persist URI in shared preference so that you can use it later.
            ContentHelper.saveSdCardInfo(getApplicationContext(), treeUri);
            getContentResolver().takePersistableUriPermission(treeUri, Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
        }

        if (data != null && resultCode == RESULT_OK) {
            switch (requestCode) {
                case UCrop.REQUEST_CROP:
                    final Uri imageUri = UCrop.getOutput(data);
                    if (imageUri != null && imageUri.getScheme().equals("file")) {
                        try {
                            //copyFileToDownloads(imageUri);
                            // TODO: 21/08/16 handle this better
                            handleEditorImage(data);
                            if (ContentHelper.copyFile(getApplicationContext(), new File(imageUri.getPath()), new File(getAlbum().getPath()))) {
                                //((ImageFragment) adapter.getRegisteredFragment(getAlbum().getCurrentMediaIndex())).displayMedia(true);
                                SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.new_file_created), bottomBar.getHeight());
                            }
                            //adapter.notifyDataSetChanged();
                        } catch (Exception e) {
                            Log.e("ERROS - uCrop", imageUri.toString(), e);
                        }
                    } else
                        SnackBarHandler.showWithBottomMargin(parentView, "errori random", bottomBar.getHeight());
                    break;
                default:
                    break;
            }
        }
    }

    // Loads the (possibly edited) image produced by the editor into the view via LoadImageTask.
    private void handleEditorImage(Intent data) {
        String newFilePath = data.getStringExtra(EditImageActivity.EXTRA_OUTPUT);
        boolean isImageEdit =
                data.getBooleanExtra(EditImageActivity.IMAGE_IS_EDIT, false);
        if (isImageEdit) {
        } else {//Or use the original unedited pictures
            newFilePath = data.getStringExtra(EditImageActivity.FILE_PATH);
        }
        //System.out.println("newFilePath---->" + newFilePath);
        //File file = new File(newFilePath);
        //System.out.println("newFilePath size ---->" + (file.length() / 1024)+"KB");
        Log.d("image is edit", isImageEdit + "");
        LoadImageTask loadTask = new LoadImageTask();
        loadTask.execute(newFilePath);
    }

    // Returns to the albums screen; when reload is false, reuses the prefetched album data.
    private void displayAlbums(boolean reload) {
        Intent i = new Intent(SingleMediaActivity.this, LFMainActivity.class);
        Bundle b = new Bundle();
        b.putInt(SplashScreen.CONTENT, SplashScreen.ALBUMS_PREFETCHED);
        if (!reload)
            i.putExtras(b);
        startActivity(i);
        finish();
    }

    /**
     * Deletes (or moves to trash, when AlertDialogsHelper.check is set) the currently shown
     * media. The behaviour depends on the active viewing mode:
     * album mode -> delete from album (with SD-card permission fallback dialog);
     * all-photos mode -> delete via MediaStore; favourites / upload-history / trash-bin
     * modes -> remove the corresponding Realm record and the in-memory list entry.
     */
    private void deleteCurrentMedia() {
        boolean success = false;
        if (!allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            if (AlertDialogsHelper.check) {
                success = addToTrash();
            } else {
                success = getAlbum().deleteCurrentMedia(getApplicationContext());
            }
            if (!success) {
                // Likely an SD-card write: ask the user to grant a document-tree permission.
                final AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
                AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, dialogBuilder,
                        R.string.sd_card_write_permission_title, R.string.sd_card_permissions_message, null);
                dialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialogInterface, int i) {
                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
                            startActivityForResult(new Intent(Intent.ACTION_OPEN_DOCUMENT_TREE), REQUEST_CODE_SD_CARD_PERMISSIONS);
                    }
                });
                dialogBuilder.show();
            }
            if (getAlbum().getMedia().size() == 0) {
                // Album emptied: leave the viewer.
                if (customUri)
                    finish();
                else {
                    getAlbums().removeCurrentAlbum();
                    displayAlbums(false);
                }
            }
            adapter.notifyDataSetChanged();
            getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());

        } else if (allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            int c = current_image_pos;
            if (AlertDialogsHelper.check) {
                success = addToTrash();
            } else {
                deleteMedia(listAll.get(current_image_pos).getPath());
                success = true;
            }
            if (success) {
                LFMainActivity.listAll.remove(current_image_pos);
                size_all = LFMainActivity.listAll.size();
                adapter.notifyDataSetChanged();
                //SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_msg));
            }
            if (current_image_pos != size_all)
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
            // mViewPager.setCurrentItem(current_image_pos);
            // toolbar.setTitle((mViewPager.getCurrentItem() + 1) + " " + getString(R.string.of) + " " + size_all);
        } else if (favphotomode && !allPhotoMode && !upoadhis && !trashdis) {
            int c = current_image_pos;
            //deleteMedia(favouriteslist.get(current_image_pos).getPath());
            // Removing a favourite only deletes the Realm bookmark, not the file itself.
            realm = Realm.getDefaultInstance();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    RealmResults<FavouriteImagesModel> favouriteImagesModels = realm.where(FavouriteImagesModel
                            .class).equalTo("path", favouriteslist.get(current_image_pos).getPath()).findAll();
                    favouriteImagesModels.deleteAllFromRealm();
                }
            });
            deleteFromList(favouriteslist.get(current_image_pos).getPath());
            size_all = favouriteslist.size();
            if (size_all > 0) {
                adapter.notifyDataSetChanged();
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
                SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
            } else {
                onBackPressed();
            }
        } else if (!favphotomode && !allPhotoMode && upoadhis && !trashdis) {
            int c = current_image_pos;
            //deleteMedia(favouriteslist.get(current_image_pos).getPath());
            // Files that were staged under .nomedia are deleted from disk as well.
            if(uploadhistory.get(current_image_pos).getPath().contains(".nomedia")){
                File file = new File(uploadhistory.get(current_image_pos).getPath());
                if(file.exists()){
                    file.delete();
                }
            }
            realm = Realm.getDefaultInstance();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    RealmResults<UploadHistoryRealmModel> uploadHistoryImagesModels = realm.where(UploadHistoryRealmModel
                            .class).equalTo("pathname", uploadhistory.get(current_image_pos).getPath()).findAll();
                    uploadHistoryImagesModels.deleteAllFromRealm();
                }
            });
            deleteFromList(uploadhistory.get(current_image_pos).getPath());
            size_all = uploadhistory.size();
            if (size_all > 0) {
                adapter.notifyDataSetChanged();
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
                SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
            } else {
                onBackPressed();
            }
        } else if (trashdis && !favphotomode && !upoadhis && !allPhotoMode) {
            int c = current_image_pos;
            realm = Realm.getDefaultInstance();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    RealmResults<TrashBinRealmModel> trashBinRealmModels = realm.where(TrashBinRealmModel.class).
                            equalTo("trashbinpath", trashbinlistd.get(current_image_pos).getPath()).findAll();
                    trashBinRealmModels.deleteAllFromRealm();
                }
            });
            deleteFromList(trashbinlistd.get(current_image_pos).getPath());
            size_all = trashbinlistd.size();
            if (size_all > 0) {
                adapter.notifyDataSetChanged();
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
                //SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
            } else {
                onBackPressed();
            }
        }
    }

    // Records a trash-bin entry in Realm for the file that was moved into the .nomedia folder.
    // NOTE(review): SimpleDateFormat is created without an explicit Locale — locale-dependent output.
    private void addTrashObjectsToRealm(String mediaPath){
        String trashbinpath = Environment.getExternalStorageDirectory() + "/" + ".nomedia";
        realm = Realm.getDefaultInstance();
        realm.beginTransaction();
        String name = mediaPath.substring(mediaPath.lastIndexOf("/") + 1);
        String trashpath = trashbinpath + "/" + name;
        TrashBinRealmModel trashBinRealmModel = realm.createObject(TrashBinRealmModel.class, trashpath);
        trashBinRealmModel.setOldpath(mediaPath);
        trashBinRealmModel.setDatetime(new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date()));
        trashBinRealmModel.setTimeperiod("null");
        realm.commitTransaction();
    }

    // Removes the first entry with the given path from the list of the active viewing mode.
    private void deleteFromList(String path){
        if(favphotomode){
            for (int i = 0; i < favouriteslist.size(); i++){
                if(favouriteslist.get(i).getPath().equals(path)){
                    favouriteslist.remove(i);
                    break;
                }
            }
        } else if(upoadhis){
            for (int i = 0; i < uploadhistory.size(); i++){
                if(uploadhistory.get(i).getPath().equals(path)){
                    uploadhistory.remove(i);
                    break;
                }
            }
        } else if(trashdis){
            for (int i = 0; i < trashbinlistd.size(); i++){
                if(trashbinlistd.get(i).getPath().equals(path)){
                    trashbinlistd.remove(i);
                    break;
                }
            }
        }
    }

    // Moves the current media into the hidden .nomedia trash folder; returns whether the move
    // succeeded. On success, a Realm trash entry is recorded and an UNDO snackbar is offered.
    private boolean addToTrash(){
        String pathOld = null;
        String oldpath = null;
        int no = 0;
        boolean succ = false;
        if(!allPhotoMode && !favphotomode && !upoadhis){
            oldpath = getAlbum().getCurrentMedia().getPath();
        } else if(allPhotoMode && !favphotomode && !upoadhis){
            oldpath = listAll.get(current_image_pos).getPath();
        }
        File file = new
File(Environment.getExternalStorageDirectory() + "/" + ".nomedia"); if (file.exists() && file.isDirectory()) { if (!allPhotoMode && !favphotomode) { pathOld = getAlbum().getCurrentMedia().getPath(); succ = getAlbum().moveCurrentMedia(getApplicationContext(), file.getAbsolutePath()); } else if (allPhotoMode && !favphotomode){ pathOld = listAll.get(current_image_pos).getPath(); succ = getAlbum().moveAnyMedia(getApplicationContext(), file.getAbsolutePath(), listAll.get (current_image_pos).getPath()); } if (succ) { Snackbar snackbar = SnackBarHandler.showWithBottomMargin2(parentView, getString(R.string .trashbin_move_onefile), navigationView.getHeight (), Snackbar.LENGTH_SHORT); final String finalOldpath = oldpath; snackbar.setAction("UNDO", new View.OnClickListener() { @Override public void onClick(View view) { getAlbum().moveAnyMedia(getApplicationContext(), getAlbum().getPath(), finalOldpath); } }); snackbar.show(); } else { SnackBarHandler.showWithBottomMargin(parentView, String.valueOf(no) + " " + getString(R.string .trashbin_move_error), navigationView.getHeight ()); } } else { if (file.mkdir()) { if (!allPhotoMode && !favphotomode) { pathOld = getAlbum().getCurrentMedia().getPath(); succ = getAlbum().moveCurrentMedia(getApplicationContext(), file.getAbsolutePath()); } else if (allPhotoMode && !favphotomode) { pathOld = getAlbum().getCurrentMedia().getPath(); succ = getAlbum().moveAnyMedia(getApplicationContext(), file.getAbsolutePath(), listAll.get (current_image_pos).getPath()); } if (succ) { SnackBarHandler.showWithBottomMargin(parentView, String.valueOf(no) + " " + getString(R.string .trashbin_move_onefile), navigationView.getHeight ()); } else { SnackBarHandler.showWithBottomMargin(parentView, String.valueOf(no) + " " + getString(R.string .trashbin_move_error), navigationView.getHeight ()); } } } addTrashObjectsToRealm(pathOld); return succ; } private void deleteMedia(String path) { String[] projection = {MediaStore.Images.Media._ID}; // Match on the file 
path String selection = MediaStore.Images.Media.DATA + " = ?"; String[] selectionArgs = new String[]{path}; // Query for the ID of the media matching the file path Uri queryUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI; ContentResolver contentResolver = getContentResolver(); Cursor c = contentResolver.query(queryUri, projection, selection, selectionArgs, null); if (c.moveToFirst()) { // We found the ID. Deleting the item via the content provider will also remove the file long id = c.getLong(c.getColumnIndexOrThrow(MediaStore.Images.Media._ID)); Uri deleteUri = ContentUris.withAppendedId(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, id); contentResolver.delete(deleteUri, null, null); } c.close(); } private void deletefav(final String path){ realm = Realm.getDefaultInstance(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmResults<FavouriteImagesModel> favouriteImagesModels = realm.where(FavouriteImagesModel .class).equalTo("path", path).findAll(); favouriteImagesModels.deleteAllFromRealm(); } }); } private void deletefromfav(final MenuItem item){ String ButtonDelete = ""; final AlertDialog.Builder deleteDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.remove_from_favourites, R.string.delete_from_favourites_message, null); ButtonDelete = this.getString(R.string.remove); deleteDialog.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null); deleteDialog.setPositiveButton(ButtonDelete.toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { if (securityObj.isActiveSecurity() && securityObj.isPasswordOnDelete()) { final boolean passco[] = {false}; final AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog 
(SingleMediaActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { //int c = current_image_pos; //deleteMedia(favouriteslist.get(current_image_pos).getPath()); item.getIcon().clearColorFilter(); deletefav(getAlbum().getCurrentMedia().getPath()); } else SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight()); } }); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){ securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else{ passco[0]=false; } } }); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.show(); passwordDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE|WindowManager .LayoutParams.FLAG_ALT_FOCUSABLE_IM); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams .SOFT_INPUT_STATE_ALWAYS_VISIBLE); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View .OnClickListener() { @Override public void onClick(View v) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { // 
int c = current_image_pos; //deleteMedia(favouriteslist.get(current_image_pos).getPath()); passwordDialog.dismiss(); item.getIcon().clearColorFilter(); SnackBarHandler.show(parentView, getApplicationContext().getString(R .string.photo_deleted_from_fav_msg)); deletefav(getAlbum().getCurrentMedia().getPath()); } else { passco[0] = true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight()); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else{ item.getIcon().clearColorFilter(); SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg)); //deleteMedia(favouriteslist.get(current_image_pos).getPath()); deletefav(getAlbum().getCurrentMedia().getPath()); } } }); AlertDialog alertDialog = deleteDialog.create(); alertDialog.show(); SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg)); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialog); } @Override public boolean onOptionsItemSelected(final MenuItem item) { switch (item.getItemId()) { case android.R.id.home: supportFinishAfterTransition(); return true; case R.id.action_copy: handler.removeCallbacks(slideShowRunnable); bottomSheetDialogFragment = new SelectAlbumBottomSheet(); bottomSheetDialogFragment.setTitle(getString(R.string.copy_to)); bottomSheetDialogFragment.setSelectAlbumInterface(new SelectAlbumBottomSheet.SelectAlbumInterface() { @Override public void folderSelected(String path) { File file = new File(path + "/" + getAlbum().getCurrentMedia().getName() + getAlbum() .getCurrentMedia().getPath().substring (getAlbum().getCurrentMedia().getPath().lastIndexOf("."))); if (file.exists()) { bottomSheetDialogFragment.dismiss(); } else { getAlbum().copyPhoto(getApplicationContext(), 
getAlbum().getCurrentMedia().getPath(), path); bottomSheetDialogFragment.dismiss(); SnackBarHandler.showWithBottomMargin(relativeLayout, getString(R.string.copied_successfully) + " to " + path, bottomBar.getHeight()); } } }); bottomSheetDialogFragment.show(getSupportFragmentManager(), bottomSheetDialogFragment.getTag()); break; case R.id.action_share: handler.removeCallbacks(slideShowRunnable); Intent share = new Intent(SingleMediaActivity.this, SharingActivity.class); if (!allPhotoMode) share.putExtra(EXTRA_OUTPUT, getAlbum().getCurrentMedia().getPath()); else share.putExtra(EXTRA_OUTPUT, listAll.get(current_image_pos).getPath()); startActivity(share); return true; case R.id.action_edit: handler.removeCallbacks(slideShowRunnable); if (!allPhotoMode && !favphotomode) { uri = Uri.fromFile(new File(getAlbum().getCurrentMedia().getPath())); } else if (allPhotoMode && !favphotomode) { uri = Uri.fromFile(new File(listAll.get(current_image_pos).getPath())); } else if (!allPhotoMode && favphotomode) { uri = Uri.fromFile(new File(favouriteslist.get(current_image_pos).getPath())); } final String extension = uri.getPath(); if (extension != null && !(extension.substring(extension.lastIndexOf(".")).equals(".gif"))) { Intent editIntent = new Intent(SingleMediaActivity.this, EditImageActivity.class); editIntent.putExtra("extra_input", uri.getPath()); editIntent.putExtra("extra_output", FileUtils.genEditFile(FileUtils.getExtension(extension)).getAbsolutePath()); editIntent.putExtra("requestCode", ACTION_REQUEST_EDITIMAGE); startActivity(editIntent); } else SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.image_invalid), bottomBar.getHeight()); break; case R.id.action_use_as: handler.removeCallbacks(slideShowRunnable); Intent intent = new Intent(Intent.ACTION_ATTACH_DATA); if (!allPhotoMode) intent.setDataAndType( getAlbum().getCurrentMedia().getUri(), getAlbum().getCurrentMedia().getMimeType()); else intent.setDataAndType(Uri.fromFile(new 
File(listAll.get(current_image_pos).getPath())), StringUtils.getMimeType(listAll.get(current_image_pos).getPath())); startActivity(Intent.createChooser(intent, getString(R.string.use_as))); return true; case R.id.print: PrintHelper photoPrinter = new PrintHelper(this); photoPrinter.setScaleMode(PrintHelper.SCALE_MODE_FIT); Bitmap bitmap = BitmapFactory.decodeFile(getAlbum().getCurrentMedia().getPath(), new BitmapFactory.Options()); photoPrinter.printBitmap(getString(R.string.print), bitmap); return true; case R.id.rename_photo: String currentpath = null; if (!allPhotoMode) { currentpath = getAlbum().getCurrentMedia().getPath(); } else { currentpath = listAll.get(current_image_pos).getPath(); } final File file = new File(currentpath); int indexofdot = file.getPath().lastIndexOf("."); int indert = file.getPath().lastIndexOf("/"); String namefile = file.getPath().substring(indert + 1, indexofdot); final String imageextension = file.getPath().substring(indexofdot + 1); AlertDialog.Builder renameDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); final EditText editTextNewName = new EditText(getApplicationContext()); editTextNewName.setText(namefile); editTextNewName.setSelectAllOnFocus(true); editTextNewName.setHint(R.string.description_hint); editTextNewName.setHintTextColor(ContextCompat.getColor(getApplicationContext(), R.color.grey)); editTextNewName.setHighlightColor(ContextCompat.getColor(getApplicationContext(), R.color.cardview_shadow_start_color)); editTextNewName.selectAll(); editTextNewName.setSingleLine(false); AlertDialogsHelper.getInsertTextDialog(SingleMediaActivity.this, renameDialogBuilder, editTextNewName, R.string.rename_image, null); renameDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); renameDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This 
should br empty it will be overwrite later //to avoid dismiss of the dialog } }); final AlertDialog renameDialog = renameDialogBuilder.create(); renameDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION); editTextNewName.setSelection(editTextNewName.getText().toString().length()); renameDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface .BUTTON_NEGATIVE}, getAccentColor(), renameDialog); renameDialog.getButton(AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, ContextCompat .getColor(SingleMediaActivity.this, R.color.grey), renameDialog); editTextNewName.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if (TextUtils.isEmpty(editable)) { // Disable ok button renameDialog.getButton( AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, ContextCompat.getColor(SingleMediaActivity.this, R.color.grey), renameDialog); } else { // Something into edit text. Enable the button. renameDialog.getButton( AlertDialog.BUTTON_POSITIVE).setEnabled(true); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getAccentColor(), renameDialog); } } }); renameDialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View dialog) { if (editTextNewName.length() != 0) { int index = file.getPath().lastIndexOf("/"); String path = file.getPath().substring(0, index); File newname = new File(path + "/" + editTextNewName.getText().toString() + "." 
+ imageextension); if (file.renameTo(newname)) { ContentResolver resolver = getApplicationContext().getContentResolver(); resolver.delete( MediaStore.Images.Media.EXTERNAL_CONTENT_URI, MediaStore.Images.Media.DATA + "=?", new String[]{file.getAbsolutePath()}); Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE); intent.setData(Uri.fromFile(newname)); getApplicationContext().sendBroadcast(intent); } if (!allPhotoMode) { int a = getAlbum().getCurrentMediaIndex(); getAlbum().getMedia(a).setPath(newname.getPath()); } else { listAll.get(current_image_pos).setPath(newname.getPath()); } renameDialog.dismiss(); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.rename_succes), navigationView .getHeight()); } } }); return true; case R.id.action_favourites: realm = Realm.getDefaultInstance(); String realpath = getAlbum().getCurrentMedia().getPath(); RealmQuery<FavouriteImagesModel> query = realm.where(FavouriteImagesModel.class).equalTo("path", realpath); if (query.count() == 0) { realm.beginTransaction(); fav = realm.createObject(FavouriteImagesModel.class, realpath); ImageDescModel q = realm.where(ImageDescModel.class).equalTo("path", realpath).findFirst(); if (q != null) { fav.setDescription(q.getTitle()); } else { fav.setDescription(" "); } item.getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN); realm.commitTransaction(); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.add_favourite), bottomBar.getHeight()); } else { deletefromfav(item); } break; case R.id.action_compress: handler.removeCallbacks(slideShowRunnable); if (!allPhotoMode) uri = Uri.fromFile(new File(getAlbum().getCurrentMedia().getPath())); else uri = Uri.fromFile(new File(listAll.get(current_image_pos).getPath())); String extension1 = uri.getPath(); if (extension1 != null && !(extension1.substring(extension1.lastIndexOf(".")).equals(".gif"))) { Intent compressIntent = new Intent(SingleMediaActivity.this, CompressImageActivity.class); if 
(!allPhotoMode) compressIntent.putExtra(EXTRA_OUTPUT, getAlbum().getCurrentMedia().getPath()); else compressIntent.putExtra(EXTRA_OUTPUT, listAll.get(current_image_pos).getPath()); startActivity(compressIntent); //to send the resolution of image handler.removeCallbacks(slideShowRunnable); if (!allPhotoMode && !favphotomode) { mediacompress = getAlbum().getCurrentMedia(); } else if (allPhotoMode && !favphotomode) { mediacompress = new Media(new File(listAll.get(current_image_pos).getPath())); } else if (!allPhotoMode && favphotomode) { mediacompress = new Media(new File(favouriteslist.get(current_image_pos).getPath())); } } else SnackBarHandler.show(parentView, R.string.image_invalid); break; case R.id.action_delete: String ButtonDelete = ""; handler.removeCallbacks(slideShowRunnable); deleteaction(ButtonDelete); return true; case R.id.slide_show: handler.removeCallbacks(slideShowRunnable); setSlideShowDialog(); return true; case R.id.action_move: final String pathcurrent = getAlbum().getCurrentMedia().getPath(); handler.removeCallbacks(slideShowRunnable); bottomSheetDialogFragment = new SelectAlbumBottomSheet(); bottomSheetDialogFragment.setTitle(getString(R.string.move_to)); bottomSheetDialogFragment.setSelectAlbumInterface(new SelectAlbumBottomSheet.SelectAlbumInterface() { @Override public void folderSelected(String path) { getAlbum().moveCurrentMedia(getApplicationContext(), path); getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size()); if (getAlbum().getMedia().size() == 0) { if (customUri) finish(); else { getAlbums().removeCurrentAlbum(); displayAlbums(false); } } adapter.notifyDataSetChanged(); getdescriptionpaths(pathcurrent, path); // toolbar.setTitle((mViewPager.getCurrentItem() + 1) + " " + getString(R.string.of) + " " + getAlbum().getCount()); bottomSheetDialogFragment.dismiss(); SnackBarHandler.showWithBottomMargin(relativeLayout, 
getString(R.string.photo_moved_successfully) + " to " + path, bottomBar.getHeight()); } }); bottomSheetDialogFragment.show(getSupportFragmentManager(), bottomSheetDialogFragment.getTag()); return true; case R.id.action_cover: AlbumSettings albumSettings = AlbumSettings.getSettings(getApplicationContext(), getAlbum()); albumSettings.changeCoverPath(getApplicationContext(), getAlbum().getCurrentMedia().getPath()); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.change_cover), bottomBar.getHeight()); return true; case R.id.action_details: Media media = null; handler.removeCallbacks(slideShowRunnable); details = true; displaydetails(media); toggleSystemUI(); viewSwitcher.showNext(); break; case R.id.action_settings: handler.removeCallbacks(slideShowRunnable); startActivity(new Intent(getApplicationContext(), SettingsActivity.class)); break; case R.id.restore_action: String button = ""; final AlertDialog.Builder deleteDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.restore, R .string.restore_image, null); button = this.getString(R.string.restore); deleteDialog.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null); deleteDialog.setPositiveButton(button.toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { if (securityObj.isActiveSecurity() && securityObj.isPasswordOnDelete()) { final boolean passco[] = {false}; final AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog (SingleMediaActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { public void 
onClick(DialogInterface dialog, int which) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { restoreImage(trashbinlistd.get(current_image_pos).getPath()); } else SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight()); } }); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if (securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]) { securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else { passco[0] = false; } } }); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.show(); passwordDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager .LayoutParams.FLAG_ALT_FOCUSABLE_IM); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams .SOFT_INPUT_STATE_ALWAYS_VISIBLE); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View .OnClickListener() { @Override public void onClick(View v) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { restoreImage(trashbinlistd.get(current_image_pos).getPath()); passwordDialog.dismiss(); } else { passco[0] = true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight()); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else 
restoreImage(trashbinlistd.get(current_image_pos).getPath()); } }); AlertDialog alertDialog = deleteDialog.create(); alertDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialog); return true; case R.id.action_description: handler.removeCallbacks(slideShowRunnable); AlertDialog.Builder descriptionDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); editTextDescription = getDescriptionDialog(SingleMediaActivity.this, descriptionDialogBuilder); editTextDescription.setSelectAllOnFocus(true); editTextDescription.setHighlightColor(ContextCompat.getColor(getApplicationContext(), R.color .cardview_shadow_start_color)); editTextDescription.selectAll(); editTextDescription.setSingleLine(false); editTextDescription.setHintTextColor(getResources().getColor(R.color.grey, null)); descriptionDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); descriptionDialogBuilder.setPositiveButton((temp != null && temp.getTitle().length() != 0) ? 
getString(R.string.update_action) : getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This should br empty it will be overwrite later } }); descriptionDialogBuilder.setNeutralButton(getString(R.string.delete).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This will be overwrite later } }); final AlertDialog descriptionDialog = descriptionDialogBuilder.create(); descriptionDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface .BUTTON_NEGATIVE}, getAccentColor(), descriptionDialog); descriptionDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE|WindowManager .LayoutParams.FLAG_ALT_FOCUSABLE_IM); descriptionDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE); descriptionDialog.getButton(AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getColor(R.color.grey), descriptionDialog); if(temp == null){ descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setTextColor(getColor(R.color.grey)); descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setEnabled(false); } else { descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setTextColor(getAccentColor()); descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setEnabled(true); } editTextDescription.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if (TextUtils.isEmpty(editable)) { // Disable ok button descriptionDialog.getButton( 
AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getColor(R.color.grey), descriptionDialog); } else { // Something into edit text. Enable the button. descriptionDialog.getButton( AlertDialog.BUTTON_POSITIVE).setEnabled(true); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getAccentColor(), descriptionDialog); } } }); descriptionDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { descriptionDialog.dismiss(); voiceInput = editTextDescription.getText().toString(); if (temp == null) { databaseHelper.addImageDesc(new ImageDescModel(pathForDescription, editTextDescription.getText().toString())); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.description_saved), bottomBar.getHeight()); } else { databaseHelper.update(new ImageDescModel(pathForDescription, editTextDescription.getText().toString())); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.description_updated), bottomBar.getHeight()); } } }); descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (temp == null){ descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setEnabled(false); } else{ descriptionDialog.dismiss(); databaseHelper.delete(temp); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.description_deleted), bottomBar.getHeight()); } } }); break; default: // If we got here, the user's action was not recognized. // Invoke the superclass to handle it. //return super.onOptionsItemSelected(item); } return super.onOptionsItemSelected(item); } private void restoreImage(String path){ realm = Realm.getDefaultInstance(); RealmResults<TrashBinRealmModel> trashBinRealmModels = realm.where(TrashBinRealmModel.class). 
equalTo("trashbinpath", path).findAll(); String oldpath = trashBinRealmModels.get(0).getOldpath(); String oldFolder = oldpath.substring(0, oldpath.lastIndexOf("/")); if(restoreMove(context, trashBinRealmModels.get(0).getTrashbinpath(), oldFolder)){ scanFile(context, new String[]{ trashBinRealmModels.get(0).getTrashbinpath(), StringUtils.getPhotoPathMoved (trashBinRealmModels.get(0).getTrashbinpath(), oldFolder) }); if( removeFromRealm(trashBinRealmModels.get(0).getTrashbinpath())){ deleteFromList(trashbinlistd.get(current_image_pos).getPath()); size_all = trashbinlistd.size(); if (size_all > 0) { adapter.notifyDataSetChanged(); getSupportActionBar().setTitle((current_image_pos + 1) + " " + getString(R.string.of) + " " + size_all); //SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg)); } else { onBackPressed(); } } } } public void scanFile(Context context, String[] path) { MediaScannerConnection.scanFile(context, path, null, null); } private boolean restoreMove(Context context, String source, String targetDir){ File from = new File(source); File to = new File(targetDir); return ContentHelper.moveFile(context, from, to); } private boolean removeFromRealm(final String path){ final boolean[] delete = {false}; Realm realm = Realm.getDefaultInstance(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmResults<TrashBinRealmModel> result = realm.where(TrashBinRealmModel.class).equalTo ("trashbinpath", path).findAll(); delete[0] = result.deleteAllFromRealm(); } }); return delete[0]; } public EditText getDescriptionDialog(final ThemedActivity activity, AlertDialog.Builder descriptionDialog) { final View DescriptiondDialogLayout = activity.getLayoutInflater().inflate(R.layout.dialog_description, null); final TextView DescriptionDialogTitle = (TextView) DescriptiondDialogLayout.findViewById(R.id.description_dialog_title); final CardView DescriptionDialogCard = (CardView) 
DescriptiondDialogLayout.findViewById(R.id.description_dialog_card);
EditText editxtDescription = (EditText) DescriptiondDialogLayout.findViewById(R.id.description_edittxt);
ImageButton VoiceRecognition = (ImageButton) DescriptiondDialogLayout.findViewById(R.id.voice_input);
// Mic button launches speech recognition; the result is delivered back via
// the REQ_CODE_SPEECH_INPUT request code.
VoiceRecognition.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        promptSpeechInput(SingleMediaActivity.this, REQ_CODE_SPEECH_INPUT, parentView, getString(R.string.speech_prompt));
    }
});
// Theme the dialog widgets from the current activity theme.
DescriptionDialogTitle.setBackgroundColor(activity.getPrimaryColor());
DescriptionDialogCard.setBackgroundColor(activity.getCardBackgroundColor());
ThemeHelper.setCursorDrawableColor(editxtDescription, activity.getTextColor());
editxtDescription.getBackground().mutate().setColorFilter(activity.getTextColor(), PorterDuff.Mode.SRC_ATOP);
editxtDescription.setTextColor(activity.getTextColor());
// Pre-fill with the description already stored for this image path, if any.
realm = Realm.getDefaultInstance();
databaseHelper = new DatabaseHelper(realm);
temp = databaseHelper.getImageDesc(pathForDescription);
if (temp != null && temp.getTitle().length() != 0) {
    editxtDescription.setText(temp.getTitle());
    editxtDescription.setSelection(editxtDescription.getText().length());
    //Toast.makeText(SingleMediaActivity.this, voiceInput, Toast.LENGTH_SHORT).show();
}
descriptionDialog.setView(DescriptiondDialogLayout);
return editxtDescription;
}

/**
 * Applies the given screen-brightness level to this window only (does not
 * change the system brightness setting).
 */
private void updateBrightness(float level) {
    WindowManager.LayoutParams lp = getWindow().getAttributes();
    lp.screenBrightness = level;
    getWindow().setAttributes(lp);
}

/**
 * Builds the UCrop options used when launching the crop screen: PNG output at
 * quality 90, chrome tinted from the current theme, free-style crop enabled.
 */
@SuppressWarnings("ResourceAsColor")
private UCrop.Options getUcropOptions() {
    UCrop.Options options = new UCrop.Options();
    options.setCompressionFormat(Bitmap.CompressFormat.PNG);
    options.setCompressionQuality(90);
    options.setActiveWidgetColor(getAccentColor());
    options.setToolbarColor(getPrimaryColor());
    // Darken the status-bar colour when the status bar is translucent.
    options.setStatusBarColor(isTranslucentStatusBar() ?
ColorPalette.getObscuredColor(getPrimaryColor()) : getPrimaryColor());
options.setCropFrameColor(getAccentColor());
options.setFreeStyleCropEnabled(true);
return options;
}

/**
 * Colours the navigation bar (Lollipop+ only). When theming of the image
 * activity is enabled, uses the primary colour (obscured if the user chose a
 * coloured nav bar); otherwise a semi-transparent black.
 */
@Override
public void setNavBarColor() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        if (isApplyThemeOnImgAct())
            if (isNavigationBarColored())
                getWindow().setNavigationBarColor(ColorPalette.getTransparentColor(ColorPalette.getObscuredColor(getPrimaryColor()), getTransparency()));
            else
                getWindow().setNavigationBarColor(ColorPalette.getTransparentColor(ContextCompat.getColor(getApplicationContext(), R.color.md_black_1000), getTransparency()));
        else
            // Theming disabled: fixed semi-transparent black (alpha 175).
            getWindow().setNavigationBarColor(ColorPalette.getTransparentColor(ContextCompat.getColor(getApplicationContext(), R.color.md_black_1000), 175));
    }
}

/**
 * Colours the status bar (Lollipop+ only), mirroring {@link #setNavBarColor()}.
 */
@Override
protected void setStatusBarColor() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        if (isApplyThemeOnImgAct())
            if (isTranslucentStatusBar() && isTransparencyZero()) {
                getWindow().setStatusBarColor(ColorPalette.getTransparentColor(getPrimaryColor(), getTransparency()));
            } else {
                getWindow().setStatusBarColor(ColorPalette.getObscuredColor(getPrimaryColor()));
            }
        else
            getWindow().setStatusBarColor(ColorPalette.getTransparentColor(
                    ContextCompat.getColor(getApplicationContext(), R.color.md_black_1000), 175));
    }
}

@Override
protected void onSaveInstanceState(@NonNull Bundle outState) {
    // Old pager-lock persistence kept for reference:
    /* if (mViewPager != null) { outState.putBoolean(ISLOCKED_ARG, mViewPager.isLocked()); }*/
    super.onSaveInstanceState(outState);
}

/**
 * Flips between immersive full-screen and normal chrome, based on the
 * current fullScreenMode flag.
 */
public void toggleSystemUI() {
    if (fullScreenMode) showSystemUI();
    else hideSystemUI();
}

/**
 * Slides the toolbar and bottom bar off screen (200ms) and enters
 * sticky-immersive full-screen mode.
 */
private void hideSystemUI() {
    runOnUiThread(new Runnable() {
        public void run() {
            toolbar.animate().translationY(-toolbar.getHeight()).setInterpolator(new AccelerateInterpolator())
                    .setDuration(200).start();
            bottomBar.animate().translationY(+bottomBar.getHeight()).setInterpolator(new AccelerateInterpolator())
                    .setDuration(200).start();
getWindow().getDecorView().setSystemUiVisibility(
        View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide nav bar
                | View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar
                | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
                | View.SYSTEM_UI_FLAG_IMMERSIVE);
fullScreenMode = true;
changeBackGroundColor();
stopHandler(); //removing any runnable from the message queue
}
});
}

/**
 * Initial chrome setup: pins the toolbar just below the status bar (no
 * animation) and applies the stable layout system-UI flags.
 */
private void setupSystemUI() {
    toolbar.animate().translationY(Measure.getStatusBarHeight(getResources())).setInterpolator(new DecelerateInterpolator())
            .setDuration(0).start();
    getWindow().getDecorView().setSystemUiVisibility(
            View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                    | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                    | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN);
}

/**
 * Slides the toolbar/bottom bar back on screen and leaves immersive mode.
 */
private void showSystemUI() {
    runOnUiThread(new Runnable() {
        public void run() {
            toolbar.animate().translationY(Measure.getStatusBarHeight(getResources())).setInterpolator(new DecelerateInterpolator())
                    .setDuration(240).start();
            bottomBar.animate().translationY(0).setInterpolator(new DecelerateInterpolator()).start();
            getWindow().getDecorView().setSystemUiVisibility(
                    View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                            | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                            | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN);
            fullScreenMode = false;
            changeBackGroundColor();
        }
    });
}

/**
 * Builds and shows the delete/remove confirmation dialog. Wording and the
 * positive-button label depend on the current mode (favourites, upload
 * history, trash bin, or normal delete with a "Move to TrashBin" checkbox);
 * an optional password gate runs before deleteCurrentMedia() is invoked.
 *
 * @param ButtonDelete label for the positive button (reassigned per mode)
 */
private void deleteaction(String ButtonDelete) {
    final AlertDialog.Builder deleteDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
    if (favphotomode) {
        // Favourites view: "remove from favourites" wording.
        AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.remove_from_favourites, R.string.delete_from_favourites_message, null);
        ButtonDelete = this.getString(R.string.remove);
    } else if (!favphotomode && !upoadhis && !trashdis) {
        // Normal delete offers the "Move to TrashBin" checkbox.
        AlertDialogsHelper.getTextCheckboxDialog(SingleMediaActivity.this, deleteDialog, R.string.delete, R
                .string.delete_photo_message, null, "Move to TrashBin", getAccentColor());
        ButtonDelete =
this.getString(R.string.delete);
} else if (upoadhis && !favphotomode && !trashdis) {
    // Upload history: plain delete confirmation.
    AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.delete, R
            .string.delete_photo_message, null);
    ButtonDelete = this.getString(R.string.delete);
} else if (trashdis && !upoadhis && !favphotomode) {
    // Trash bin: warn that the image will be deleted permanently.
    AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.delete, R
            .string.delete_image_bin, null);
    ButtonDelete = this.getString(R.string.delete);
}
deleteDialog.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null);
deleteDialog.setPositiveButton(ButtonDelete.toUpperCase(), new DialogInterface.OnClickListener() {
    public void onClick(DialogInterface dialog, int id) {
        if (securityObj.isActiveSecurity() && securityObj.isPasswordOnDelete()) {
            // Security enabled for deletions: ask for the password first.
            // passco[0] tracks whether a wrong password was just entered so the
            // inline error is not hidden immediately by the text watcher.
            final boolean passco[] = {false};
            final AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
            final EditText editTextPassword = securityObj.getInsertPasswordDialog
                    (SingleMediaActivity.this, passwordDialogBuilder);
            editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null));
            passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                    if (securityObj.checkPassword(editTextPassword.getText().toString())) {
                        deleteCurrentMedia();
                    } else
                        SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight());
                }
            });
            editTextPassword.addTextChangedListener(new TextWatcher() {
                @Override
                public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                    //empty method body
                }

                @Override
                public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                    //empty method body
                }

                @Override
                public void afterTextChanged(Editable editable) {
                    // Hide the inline "wrong password" error once the user types again.
                    if (securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]) {
                        securityObj.getTextInputLayout().setVisibility(View.INVISIBLE);
                    } else {
                        passco[0] = false;
                    }
                }
            });
            passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null);
            final AlertDialog passwordDialog = passwordDialogBuilder.create();
            passwordDialog.show();
            // Keep the soft keyboard usable inside the dialog.
            passwordDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager
                    .LayoutParams.FLAG_ALT_FOCUSABLE_IM);
            passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams
                    .SOFT_INPUT_STATE_ALWAYS_VISIBLE);
            AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE,
                    DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog);
            // Positive button is overridden so a wrong password keeps the dialog open.
            passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View
                    .OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (securityObj.checkPassword(editTextPassword.getText().toString())) {
                        deleteCurrentMedia();
                        passwordDialog.dismiss();
                    } else {
                        passco[0] = true;
                        securityObj.getTextInputLayout().setVisibility(View.VISIBLE);
                        SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight());
                        editTextPassword.getText().clear();
                        editTextPassword.requestFocus();
                    }
                }
            });
        } else deleteCurrentMedia();
    }
});
AlertDialog alertDialog = deleteDialog.create();
alertDialog.show();
AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE,
        DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialog);
}

// Fills the slide-in details panel for the current image. The Media argument
// is ignored and reassigned from the active mode (album / all-photos /
// favourites / upload-history). NOTE(review): this method continues beyond
// this chunk of the file.
private void displaydetails(Media media) {
    final View v = findViewById(R.id.layout_image_description);
    LinearLayout linearLayout = (LinearLayout) v;
    if (!allPhotoMode && !favphotomode && !upoadhis) {
        media = getAlbum().getCurrentMedia();
    } else if (allPhotoMode && !favphotomode && !upoadhis) {
        media = new Media(new File(listAll.get(current_image_pos).getPath()));
    } else if (!allPhotoMode && favphotomode && !upoadhis) {
        media = new Media(new File(favouriteslist.get(current_image_pos).getPath()));
    } else if (!favphotomode && !allPhotoMode &&
upoadhis){
    media = new Media(new File(uploadhistory.get(current_image_pos).getPath()));
}
// Key/value map of the media's metadata (Date, Type, Resolution, Path, EXIF, ...).
final MediaDetailsMap<String,String> mediaDetailsMap = media.getMainDetails(this);
LinearLayout linearLayout1 = (LinearLayout) findViewById(R.id.image_desc_top);
linearLayout1.setBackgroundColor(getPrimaryColor());
v.setBackgroundColor(getBackgroundColor());
// Light text on dark themes, dark text on the light theme.
int textColor = getBaseTheme() != ThemeHelper.LIGHT_THEME ? Color.parseColor("#FAFAFA") : Color.parseColor("#455A64");
/* Getting all the viewgroups and views of the image description layout */
TextView imgDate = (TextView) linearLayout.findViewById(R.id.image_desc_date);
imgDate.setTextColor(textColor);
TextView imgLocation = (TextView) linearLayout.findViewById(R.id.image_desc_loc);
imgLocation.setTextColor(textColor);
TextView imgTitle = (TextView) linearLayout.findViewById(R.id.image_desc_title);
imgTitle.setTextColor(textColor);
TextView imgType = (TextView) linearLayout.findViewById(R.id.image_desc_type);
imgType.setTextColor(textColor);
TextView imgSize = (TextView) linearLayout.findViewById(R.id.image_desc_size);
imgSize.setTextColor(textColor);
TextView imgResolution = (TextView) linearLayout.findViewById(R.id.image_desc_res);
imgResolution.setTextColor(textColor);
TextView imgPath = (TextView) linearLayout.findViewById(R.id.image_desc_path);
imgPath.setTextColor(textColor);
TextView imgOrientation = (TextView) linearLayout.findViewById(R.id.image_desc_orientation);
imgOrientation.setTextColor(textColor);
TextView imgExif = (TextView) linearLayout.findViewById(R.id.image_desc_exif);
imgExif.setTextColor(textColor);
TextView imgDesc = (TextView) linearLayout.findViewById(R.id.image_desc);
imgDesc.setTextColor(textColor);
IconicsImageView iconicsImageView = (IconicsImageView) linearLayout.findViewById(R.id.date_icon);
iconicsImageView.setColor(textColor);
IconicsImageView locationicon = (IconicsImageView) linearLayout.findViewById(R.id.loca_icon);
locationicon.setColor(textColor);
IconicsImageView detailsicon =
(IconicsImageView) linearLayout.findViewById(R.id.detail_icon);
detailsicon.setColor(textColor);
// Back arrow flips the ViewSwitcher back to the image pager.
ImageButton imgBack = (ImageButton) linearLayout.findViewById(R.id.img_desc_back_arrow);
imgBack.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        viewSwitcher.showPrevious();
        details = false;
        toggleSystemUI();
    }
});
/*Setting the label text colours*/
TextView datelabel = (TextView) linearLayout.findViewById(R.id.date_label);
datelabel.setTextColor(textColor);
TextView locationlabel = (TextView) linearLayout.findViewById(R.id.location_label);
locationlabel.setTextColor(textColor);
TextView detaillabel = (TextView) linearLayout.findViewById(R.id.details_label);
detaillabel.setTextColor(textColor);
TextView titlelabel = (TextView) linearLayout.findViewById(R.id.title_label);
titlelabel.setTextColor(textColor);
TextView typelabel = (TextView) linearLayout.findViewById(R.id.type_label);
typelabel.setTextColor(textColor);
TextView sizelabel = (TextView) linearLayout.findViewById(R.id.size_label);
sizelabel.setTextColor(textColor);
TextView reslabel = (TextView) linearLayout.findViewById(R.id.resolution_label);
reslabel.setTextColor(textColor);
TextView pathlabel = (TextView) linearLayout.findViewById(R.id.path_label);
pathlabel.setTextColor(textColor);
TextView orientationlabel = (TextView) linearLayout.findViewById(R.id.orientation_label);
orientationlabel.setTextColor(textColor);
TextView exiflabel = (TextView) linearLayout.findViewById(R.id.exif_label);
exiflabel.setTextColor(textColor);
TextView desclabel = (TextView) linearLayout.findViewById(R.id.description_label);
desclabel.setTextColor(textColor);
/*Setting the values to all the textViews*/
try {
    imgDate.setText(mediaDetailsMap.get("Date").toString());
    imgTitle.setText(media.getName());
    imgType.setText(mediaDetailsMap.get("Type").toUpperCase());
    imgSize.setText(StringUtils.humanReadableByteCount(media.getSize(), true));
    imgResolution.setText(mediaDetailsMap.get("Resolution"));
    // Paths inside ".nomedia" belong to images deleted after sharing; show a placeholder.
    if(mediaDetailsMap.get("Path").toString().contains(".nomedia")){
        imgPath.setText(R.string.deleted_share_image);
    } else {
        imgPath.setText(mediaDetailsMap.get("Path").toString());
    }
    imgOrientation.setText(mediaDetailsMap.get("Orientation"));
    if(mediaDetailsMap.get("Description") == null) {
        imgDesc.setText(R.string.no_description);
    } else{
        imgDesc.setText(mediaDetailsMap.get("Description"));
    }
    if(mediaDetailsMap.get("EXIF") == null){
        imgExif.setText(R.string.no_exif_data);
    } else {
        imgExif.setText(mediaDetailsMap.get("EXIF"));
    }
    if(mediaDetailsMap.get("Location") == null){
        imgLocation.setText(R.string.no_location);
    } else{
        // Highlight the location entry: it doubles as a link to the Maps app below.
        imgLocation.setText(mediaDetailsMap.get("Location").toString());
        imgLocation.setTextColor(getResources().getColor(R.color.accent_orange, null));
    }
} catch (Exception e){
    //Raised if null values is found, no need to handle
}
// Tapping the location opens Google Maps on that coordinate (no-op when absent).
imgLocation.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View view) {
        if(mediaDetailsMap.get("Location")!=null){
            Uri gmmIntentUri = Uri.parse("geo:0,0?q="+ mediaDetailsMap.get("Location"));
            Intent mapIntent = new Intent(Intent.ACTION_VIEW, gmmIntentUri);
            mapIntent.setPackage("com.google.android.apps.maps");
            startActivity(mapIntent);
        }
    }
});
}

// Animates the activity background between the theme colour and black when toggling
// full-screen mode (240 ms ARGB fade).
private void changeBackGroundColor() {
    int colorTo;
    int colorFrom;
    if (fullScreenMode) {
        colorFrom = getBackgroundColor();
        colorTo = (ContextCompat.getColor(SingleMediaActivity.this, R.color.md_black_1000));
    } else {
        colorFrom = (ContextCompat.getColor(SingleMediaActivity.this, R.color.md_black_1000));
        colorTo = getBackgroundColor();
    }
    ValueAnimator colorAnimation = ValueAnimator.ofObject(new ArgbEvaluator(), colorFrom, colorTo);
    colorAnimation.setDuration(240);
    colorAnimation.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
        @Override
        public void onAnimationUpdate(ValueAnimator animator) {
            ActivityBackground.setBackgroundColor((Integer) animator.getAnimatedValue());
        }
    });
    colorAnimation.start();
}

// Back closes the details panel first; only a second back leaves the activity.
@Override
public void onBackPressed() {
    if
(details) {
        viewSwitcher.showPrevious();
        toggleSystemUI();
        details = false;
    } else super.onBackPressed();
}

@Override
public void onPause() {
    super.onPause();
    // Play the zoom-out exit animation only when the activity is actually finishing.
    if (isFinishing()){
        overridePendingTransition(0, R.anim.media_zoom_out);
    }
}

// Single tap toggles the system bars; it also cancels a running slideshow.
@Override
public void singleTap() {
    toggleSystemUI();
    if(slideshow) {
        handler.removeCallbacks(slideShowRunnable);
        slideshow=false;
    }
}

// Resumes the postponed shared-element transition once the image is ready to draw.
@Override
public void startPostponedTransition() {
    getWindow().setSharedElementEnterTransition(new ChangeBounds().setDuration(300));
    startPostponedEnterTransition();
}

// Builds the themed "slideshow interval" dialog and starts the slideshow on confirm.
private void setSlideShowDialog() {
    final AlertDialog.Builder slideshowDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
    final View SlideshowDialogLayout = getLayoutInflater().inflate(R.layout.dialog_slideshow, null);
    final TextView slideshowDialogTitle = (TextView) SlideshowDialogLayout.findViewById(R.id.slideshow_dialog_title);
    final CardView slideshowDialogCard = (CardView) SlideshowDialogLayout.findViewById(R.id.slideshow_dialog_card);
    final EditText editTextTimeInterval = (EditText) SlideshowDialogLayout.findViewById(R.id.slideshow_edittext);
    slideshowDialogTitle.setBackgroundColor(getPrimaryColor());
    slideshowDialogCard.setBackgroundColor(getCardBackgroundColor());
    editTextTimeInterval.getBackground().mutate().setColorFilter(getTextColor(), PorterDuff.Mode.SRC_ATOP);
    editTextTimeInterval.setTextColor(getTextColor());
    editTextTimeInterval.setHintTextColor(getSubTextColor());
    setCursorDrawableColor(editTextTimeInterval, getTextColor());
    slideshowDialog.setView(SlideshowDialogLayout);
    AlertDialog dialog = slideshowDialog.create();
    dialog.setButton(DialogInterface.BUTTON_POSITIVE, getString(R.string.ok).toUpperCase(), new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int which) {
            String value= editTextTimeInterval.getText().toString();
            if(!"".equals(value)) {
                slideshow=true;
                // NOTE(review): Integer.parseInt can throw NumberFormatException for
                // oversized numeric input (inputType presumably restricts to digits
                // — TODO confirm against dialog_slideshow.xml).
                int intValue = Integer.parseInt(value);
                SLIDE_SHOW_INTERVAL = intValue * 1000;
                // Intervals of at least 2 s (i.e. > 1000 ms after *1000) start the slideshow.
                if(SLIDE_SHOW_INTERVAL > 1000) {
                    hideSystemUI();
                    handler.postDelayed(slideShowRunnable, SLIDE_SHOW_INTERVAL);
                } else
                    Toast.makeText(SingleMediaActivity.this, "Minimum duration is 2 sec", Toast.LENGTH_SHORT).show();
            }
        }
    });
    dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE);
    dialog.show();
    AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getAccentColor(), dialog);
}

@Override
protected void onDestroy() {
    super.onDestroy();
    // Stop any pending slideshow tick so the destroyed activity is not leaked.
    handler.removeCallbacks(slideShowRunnable);
}

// Decodes a down-sampled bitmap off the UI thread and swaps it into imgView,
// recycling the previously shown bitmap to bound memory use.
private final class LoadImageTask extends AsyncTask<String, Void, Bitmap> {
    @Override
    protected Bitmap doInBackground(String... params) {
        // Quarter resolution is enough for the preview ImageView.
        return BitmapUtils.getSampledBitmap(params[0], imageWidth / 4, imageHeight / 4);
    }

    @Override
    protected void onCancelled() {
        super.onCancelled();
    }

    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    @Override
    protected void onCancelled(Bitmap result) {
        super.onCancelled(result);
    }

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
    }

    @Override
    protected void onPostExecute(Bitmap result) {
        super.onPostExecute(result);
        if (mainBitmap != null) {
            mainBitmap.recycle();
            mainBitmap = null;
            System.gc();
        }
        mainBitmap = result;
        imgView.setImageBitmap(mainBitmap);
    }
}

// Rebuilds the pager adapter/listeners after a configuration change.
// NOTE(review): unlike initUI this only branches on album / all-photos / favourites;
// upload-history and trash-bin modes fall into the first (album) branch — looks like
// an oversight, verify against initUI before relying on rotation in those modes.
private void setUpViewPager() {
    BasicCallBack basicCallBack = new BasicCallBack() {
        @Override
        public void callBack(int status, Object data) {
            toggleSystemUI();
        }
    };
    if (!allPhotoMode && !favphotomode) {
        adapter = new ImageAdapter(getAlbum().getMedia(), basicCallBack, this, this);
        getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
        mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
            @Override
            public void onPageChanged(int oldPosition, int position) {
                getAlbum().setCurrentPhotoIndex(position);
                toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
                invalidateOptionsMenu();
pathForDescription = getAlbum().getMedia().get(position).getPath();
            }
        });
        mViewPager.scrollToPosition(getAlbum().getCurrentMediaIndex());
    } else if(allPhotoMode && !favphotomode){
        adapter = new ImageAdapter(LFMainActivity.listAll, basicCallBack, this, this);
        getSupportActionBar().setTitle(current_image_pos + 1 + " " + getString(R.string.of) + " " + size_all);
        mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
            @Override
            public void onPageChanged(int oldPosition, int position) {
                current_image_pos = position;
                getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                invalidateOptionsMenu();
                pathForDescription = listAll.get(position).getPath();
            }
        });
        mViewPager.scrollToPosition(current_image_pos);
    } else if(!allPhotoMode && favphotomode){
        adapter = new ImageAdapter(favouriteslist, basicCallBack, this, this);
        getSupportActionBar().setTitle(current_image_pos + 1 + " " + getString(R.string.of) + " " + size_all);
        mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
            @Override
            public void onPageChanged(int oldPosition, int position) {
                current_image_pos = position;
                getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                invalidateOptionsMenu();
                pathForDescription = favouriteslist.get(position).getPath();
            }
        });
        mViewPager.scrollToPosition(current_image_pos);
    }
    mViewPager.setAdapter(adapter);
}
}
app/src/main/java/org/fossasia/phimpme/gallery/activities/SingleMediaActivity.java
package org.fossasia.phimpme.gallery.activities; import android.animation.ArgbEvaluator; import android.animation.ValueAnimator; import android.annotation.TargetApi; import android.content.ContentResolver; import android.content.ContentUris; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.res.Configuration; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Color; import android.graphics.PorterDuff; import android.media.MediaScannerConnection; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.provider.MediaStore; import android.provider.Settings; import android.speech.RecognizerIntent; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.design.widget.Snackbar; import android.support.v4.content.ContextCompat; import android.support.v4.print.PrintHelper; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatDelegate; import android.support.v7.widget.ActionMenuView; import android.support.v7.widget.CardView; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.Toolbar; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.transition.ChangeBounds; import android.util.DisplayMetrics; import android.util.Log; import android.view.Display; import android.view.Menu; import android.view.MenuItem; import android.view.Surface; import android.view.View; import android.view.WindowManager; import android.view.animation.AccelerateInterpolator; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.view.animation.DecelerateInterpolator; import 
android.widget.EditText; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import android.widget.ViewSwitcher; import com.bumptech.glide.Glide; import com.mikepenz.community_material_typeface_library.CommunityMaterial; import com.mikepenz.iconics.view.IconicsImageView; import com.yalantis.ucrop.UCrop; import org.fossasia.phimpme.R; import org.fossasia.phimpme.base.SharedMediaActivity; import org.fossasia.phimpme.base.ThemedActivity; import org.fossasia.phimpme.data.local.DatabaseHelper; import org.fossasia.phimpme.data.local.FavouriteImagesModel; import org.fossasia.phimpme.data.local.ImageDescModel; import org.fossasia.phimpme.data.local.TrashBinRealmModel; import org.fossasia.phimpme.data.local.UploadHistoryRealmModel; import org.fossasia.phimpme.editor.CompressImageActivity; import org.fossasia.phimpme.editor.EditImageActivity; import org.fossasia.phimpme.editor.FileUtils; import org.fossasia.phimpme.editor.utils.BitmapUtils; import org.fossasia.phimpme.gallery.SelectAlbumBottomSheet; import org.fossasia.phimpme.gallery.adapters.ImageAdapter; import org.fossasia.phimpme.gallery.data.Album; import org.fossasia.phimpme.gallery.data.AlbumSettings; import org.fossasia.phimpme.gallery.data.Media; import org.fossasia.phimpme.gallery.data.base.MediaDetailsMap; import org.fossasia.phimpme.gallery.util.AlertDialogsHelper; import org.fossasia.phimpme.gallery.util.ColorPalette; import org.fossasia.phimpme.gallery.util.ContentHelper; import org.fossasia.phimpme.gallery.util.Measure; import org.fossasia.phimpme.gallery.util.PreferenceUtil; import org.fossasia.phimpme.gallery.util.SecurityHelper; import org.fossasia.phimpme.gallery.util.StringUtils; import org.fossasia.phimpme.gallery.util.ThemeHelper; import org.fossasia.phimpme.gallery.views.PagerRecyclerView; import org.fossasia.phimpme.share.SharingActivity; import 
org.fossasia.phimpme.utilities.ActivitySwitchHelper; import org.fossasia.phimpme.utilities.BasicCallBack; import org.fossasia.phimpme.utilities.SnackBarHandler; import java.io.File; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import butterknife.BindView; import butterknife.ButterKnife; import io.realm.Realm; import io.realm.RealmQuery; import io.realm.RealmResults; import static org.fossasia.phimpme.gallery.activities.LFMainActivity.listAll; import static org.fossasia.phimpme.utilities.Utils.promptSpeechInput; /** * Created by dnld on 18/02/16. */ @SuppressWarnings("ResourceAsColor") public class SingleMediaActivity extends SharedMediaActivity implements ImageAdapter.OnSingleTap, ImageAdapter.enterTransition { private static int SLIDE_SHOW_INTERVAL = 5000; private static final String ISLOCKED_ARG = "isLocked"; static final String ACTION_OPEN_ALBUM = "android.intent.action.pagerAlbumMedia"; private static final String ACTION_REVIEW = "com.android.camera.action.REVIEW"; private int REQUEST_CODE_SD_CARD_PERMISSIONS = 42; private ImageAdapter adapter; private PreferenceUtil SP; private RelativeLayout ActivityBackground; private SelectAlbumBottomSheet bottomSheetDialogFragment; private SecurityHelper securityObj; private boolean fullScreenMode, customUri = false; public static final int TAKE_PHOTO_CODE = 8; public static final int ACTION_REQUEST_EDITIMAGE = 9; public static final int ACTION_REQUEST_COMPRESSIMAGE = 13; public static final int ACTION_STICKERS_IMAGE = 10; private Bitmap mainBitmap; private int imageWidth, imageHeight; private String path; private SingleMediaActivity context; public static final String EXTRA_OUTPUT = "extra_output"; public static String pathForDescription; public Boolean allPhotoMode; public Boolean favphotomode; public Boolean upoadhis; private Boolean trashdis; public int all_photo_pos; public int size_all; public int current_image_pos; private Uri uri; private Realm realm; private 
FavouriteImagesModel fav;
    private DatabaseHelper databaseHelper;
    private Handler handler;
    private Runnable runnable;
    boolean slideshow = false;
    // details == true while the image-details panel is showing.
    private boolean details = false;
    private ArrayList<Media> favouriteslist;
    public static Media mediacompress = null;
    private ArrayList<Media> uploadhistory;
    private ArrayList<Media> trashbinlistd;
    ImageDescModel temp;
    private final int REQ_CODE_SPEECH_INPUT = 100;
    String voiceInput;
    EditText editTextDescription;
    private RelativeLayout relativeLayout;

    @Nullable
    @BindView(R.id.view_switcher_single_media)
    ViewSwitcher viewSwitcher;

    @Nullable
    @BindView(R.id.PhotoPager_Layout)
    View parentView;

    @Nullable
    @BindView(R.id.toolbar_bottom)
    ActionMenuView bottomBar;

    @Nullable
    @BindView(R.id.img)
    ImageView imgView;

    @Nullable
    @BindView(R.id.photos_pager)
    PagerRecyclerView mViewPager;

    @Nullable
    @BindView(R.id.toolbar)
    Toolbar toolbar;

    // Advances the pager to the next media for the slideshow and re-schedules
    // itself until the end of the collection is reached.
    Runnable slideShowRunnable = new Runnable() {
        @Override
        public void run() {
            try {
                if (!allPhotoMode && !favphotomode) {
                    mViewPager.scrollToPosition((getAlbum().getCurrentMediaIndex() + 1) % getAlbum().getMedia().size());
                } else if (allPhotoMode && !favphotomode) {
                    mViewPager.scrollToPosition((current_image_pos + 1) % listAll.size());
                } else if (favphotomode && !allPhotoMode) {
                    mViewPager.scrollToPosition((current_image_pos + 1) % favouriteslist.size());
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Stop at the second-to-last item, otherwise keep ticking.
                if (getAlbum().getCurrentMediaIndex() + 1 == getAlbum().getMedia().size() - 1) {
                    handler.removeCallbacks(slideShowRunnable);
                    slideshow = false;
                    toggleSystemUI();
                } else {
                    handler.postDelayed(this, SLIDE_SHOW_INTERVAL);
                }
            }
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Postpone the shared-element transition until the image is loaded.
        supportPostponeEnterTransition();
        context = this;
        setContentView(R.layout.activity_pager);
        ButterKnife.bind(this);
        relativeLayout = (RelativeLayout) findViewById(R.id.PhotoPager_Layout);
        DisplayMetrics metrics = getResources().getDisplayMetrics();
        imageWidth = metrics.widthPixels;
imageHeight = metrics.heightPixels;
        handler = new Handler();
        // Auto-hide the system bars after 5 s of inactivity (see startHandler()).
        runnable = new Runnable() {
            @Override
            public void run() {
                hideSystemUI();
            }
        };
        startHandler();
        overridePendingTransition(R.anim.media_zoom_in, 0);
        SP = PreferenceUtil.getInstance(getApplicationContext());
        securityObj = new SecurityHelper(SingleMediaActivity.this);
        // Viewing mode and starting position come from the launching Intent.
        favphotomode = getIntent().getBooleanExtra("fav_photos", false);
        upoadhis = getIntent().getBooleanExtra("uploadhistory", false);
        trashdis = getIntent().getBooleanExtra("trashbin", false);
        allPhotoMode = getIntent().getBooleanExtra(getString(R.string.all_photo_mode), false);
        all_photo_pos = getIntent().getIntExtra(getString(R.string.position), 0);
        size_all = getIntent().getIntExtra(getString(R.string.allMediaSize), getAlbum().getCount());
        if (getIntent().hasExtra("favouriteslist")) {
            favouriteslist = getIntent().getParcelableArrayListExtra("favouriteslist");
        }
        if (getIntent().hasExtra("datalist")) {
            uploadhistory = getIntent().getParcelableArrayListExtra("datalist");
        }
        if (getIntent().hasExtra("trashdatalist")) {
            trashbinlistd = getIntent().getParcelableArrayListExtra("trashdatalist");
        }
        String path2 = getIntent().getStringExtra("path");
        pathForDescription = path2;

        // mViewPager.setLocked(savedInstanceState.getBoolean(ISLOCKED_ARG, false));

        try {
            Album album;
            // Opened from an external app (ACTION_VIEW / camera REVIEW intent).
            if ((getIntent().getAction().equals(Intent.ACTION_VIEW) || getIntent().getAction().equals(ACTION_REVIEW)) && getIntent().getData() != null) {
                String path = ContentHelper.getMediaPath(getApplicationContext(), getIntent().getData());
                pathForDescription = path;
                File file = null;
                if (path != null)
                    file = new File(path);
                if (file != null && file.isFile()) {
                    //the image is stored in the storage
                    album = new Album(getApplicationContext(), file);
                } else {
                    //try to show with Uri
                    album = new Album(getApplicationContext(), getIntent().getData());
                    customUri = true;
                }
                getAlbums().addAlbum(0, album);
            }
            setUpSwitcherAnimation();
            initUI();
            setupUI();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private
void setUpSwitcherAnimation() {
        // Cross-fade when switching between the pager and the details panel.
        Animation in = AnimationUtils.loadAnimation(this, android.R.anim.fade_in);
        Animation out = AnimationUtils.loadAnimation(this, android.R.anim.fade_out);
        viewSwitcher.setInAnimation(in);
        viewSwitcher.setOutAnimation(out);
    }

    // Builds the bottom menu, toolbar and pager for the active viewing mode.
    private void initUI() {
        final Menu bottomMenu = bottomBar.getMenu();
        getMenuInflater().inflate(R.menu.menu_bottom_view_pager, bottomMenu);
        // Upload-history and trash-bin entries cannot be edited/favourited.
        if (upoadhis) {
            bottomMenu.findItem(R.id.action_favourites).setVisible(false);
            bottomMenu.findItem(R.id.action_edit).setVisible(false);
            bottomMenu.findItem(R.id.action_compress).setVisible(false);
        }
        if (trashdis) {
            bottomMenu.findItem(R.id.action_favourites).setVisible(false);
            bottomMenu.findItem(R.id.action_edit).setVisible(false);
            bottomMenu.findItem(R.id.action_compress).setVisible(false);
            bottomMenu.findItem(R.id.action_share).setVisible(false);
            bottomMenu.findItem(R.id.restore_action).setVisible(true);
            bottomMenu.findItem(R.id.action_details).setVisible(false);
            //bottomMenu.findItem(R.id.action_delete).setVisible(false);
        }
        if (!allPhotoMode && favphotomode) {
            // Hides the sixth bottom-bar item in favourites mode.
            bottomBar.getMenu().getItem(5).setVisible(false);
        }
        // Route every bottom-bar item through onOptionsItemSelected and pause the
        // auto-hide timer while a menu action is in flight.
        for (int i = 0; i < bottomMenu.size(); i++) {
            bottomMenu.getItem(i).setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
                @Override
                public boolean onMenuItemClick(MenuItem item) {
                    stopHandler();
                    return onOptionsItemSelected(item);
                }
            });
        }
        setSupportActionBar(toolbar);
        toolbar.bringToFront();
        toolbar.setNavigationIcon(getToolbarIcon(CommunityMaterial.Icon.cmd_arrow_left));
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onBackPressed();
            }
        });
        setRecentApp(getString(R.string.app_name));
        setupSystemUI();
        final LinearLayoutManager linearLayoutManager = new LinearLayoutManager(ActivitySwitchHelper.getContext(), LinearLayoutManager.HORIZONTAL, false);
        mViewPager.setLayoutManager(linearLayoutManager);
        mViewPager.setHasFixedSize(true);
        mViewPager.setLongClickable(true);
        // Keep our full-screen state in sync with external system-UI changes.
        getWindow().getDecorView().setOnSystemUiVisibilityChangeListener(new View.OnSystemUiVisibilityChangeListener() {
            @Override
            public void onSystemUiVisibilityChange(int visibility) {
                if ((visibility & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0)
                    showSystemUI();
                else
                    hideSystemUI();
            }
        });

        BasicCallBack basicCallBack = new BasicCallBack() {
            @Override
            public void callBack(int status, Object data) {
                toggleSystemUI();
            }
        };

        // One branch per viewing mode: adapter source, toolbar title "i of n",
        // and a page-change listener that tracks position / favourite state.
        if (!allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            adapter = new ImageAdapter(getAlbum().getMedia(), basicCallBack, this, this);
            getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
            // toolbar.setTitle((mViewPager.getCurrentItem() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    getAlbum().setCurrentPhotoIndex(position);
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
                    invalidateOptionsMenu();
                    // Tint the favourites icon when the current image is a favourite.
                    if (!favsearch(getAlbum().getMedia(position).getPath())) {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().clearColorFilter();
                    } else {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN);
                    }
                    pathForDescription = getAlbum().getMedia().get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(getAlbum().getCurrentMediaIndex());
        } else if (allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            adapter = new ImageAdapter(LFMainActivity.listAll, basicCallBack, this, this);
            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + getString(R.string.of) + " " + size_all);
            current_image_pos = all_photo_pos;
            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    // Tint the favourites icon to match the current image's state.
                    if (!favsearch(listAll.get(current_image_pos).getPath())) {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().clearColorFilter();
                    } else {
                        bottomMenu.findItem(R.id.action_favourites).getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN);
                    }
                    pathForDescription = listAll.get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(all_photo_pos);
        } else if (!allPhotoMode && favphotomode && !upoadhis && !trashdis) {
            // Favourites mode.
            adapter = new ImageAdapter(favouriteslist, basicCallBack, this, this);
            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + getString(R.string.of) + " " + size_all);
            current_image_pos = all_photo_pos;
            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    pathForDescription = favouriteslist.get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(all_photo_pos);
        } else if (!favphotomode && !allPhotoMode && upoadhis && !trashdis) {
            // Upload-history mode.
            adapter = new ImageAdapter(uploadhistory, basicCallBack, this, this);
            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + getString(R.string.of) + " " + size_all);
            current_image_pos = all_photo_pos;
            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    pathForDescription = uploadhistory.get(position).getPath();
                }
            });
mViewPager.scrollToPosition(all_photo_pos);
        } else if (trashdis && !upoadhis && !favphotomode && !allPhotoMode) {
            // Trash-bin mode.
            adapter = new ImageAdapter(trashbinlistd, basicCallBack, this, this);
            // NOTE(review): literal "of" here instead of getString(R.string.of) —
            // inconsistent with the other branches and not localized.
            getSupportActionBar().setTitle(all_photo_pos + 1 + " " + "of" + " " + size_all);
            current_image_pos = all_photo_pos;
            mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() {
                @Override
                public void onPageChanged(int oldPosition, int position) {
                    current_image_pos = position;
                    getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex());
                    toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all);
                    invalidateOptionsMenu();
                    pathForDescription = trashbinlistd.get(position).getPath();
                }
            });
            mViewPager.scrollToPosition(all_photo_pos);
        }
        Display aa = ((WindowManager) getSystemService(WINDOW_SERVICE)).getDefaultDisplay();
        mViewPager.setAdapter(adapter);
        // Already rotated to landscape: force a configuration pass so margins fit.
        if (aa.getRotation() == Surface.ROTATION_90) {
            Configuration configuration = new Configuration();
            configuration.orientation = Configuration.ORIENTATION_LANDSCAPE;
            onConfigurationChanged(configuration);
        }
    }

    // Applies theme colours, brightness and orientation preferences; called from
    // onCreate and again from onResume.
    private void setupUI() {

        /**** Theme ****/
        toolbar = (Toolbar) findViewById(R.id.toolbar);
        toolbar.setBackgroundColor(
                isApplyThemeOnImgAct()
                        ? ColorPalette.getTransparentColor(getPrimaryColor(), getTransparency())
                        : ColorPalette.getTransparentColor(getDefaultThemeToolbarColor3th(), 175));
        toolbar.setPopupTheme(getPopupToolbarStyle());

        ActivityBackground = (RelativeLayout) findViewById(R.id.PhotoPager_Layout);
        ActivityBackground.setBackgroundColor(getBackgroundColor());

        setStatusBarColor();
        setNavBarColor();
        securityObj.updateSecuritySetting();

        /**** SETTINGS ****/

        if (SP.getBoolean("set_max_luminosity", false))
            updateBrightness(1.0F);
        else try {
            float brightness = Settings.System.getInt(
                    getContentResolver(), Settings.System.SCREEN_BRIGHTNESS);
            brightness = brightness == 1.0F ?
255.0F : brightness; updateBrightness(brightness); } catch (Settings.SettingNotFoundException e) { e.printStackTrace(); } if (SP.getBoolean("set_picture_orientation", false)) setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR); else setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_USER); } /** * startHandler and stopHandler are helper methods for onUserInteraction, that auto-hides the nav-bars * and switch the activity to full screen, thus giving more better UX. */ private void startHandler() { handler.postDelayed(runnable, 5000); } private void stopHandler() { handler.removeCallbacks(runnable); } @Override public void onUserInteraction() { super.onUserInteraction(); stopHandler(); startHandler(); } @Override public void onResume() { super.onResume(); ActivitySwitchHelper.setContext(this); setupUI(); } @Override protected void onStop() { super.onStop(); stopHandler(); SP.putBoolean("auto_update_media", true); } @Override public void onLowMemory() { super.onLowMemory(); Glide.get(getApplicationContext()).clearMemory(); Glide.get(getApplicationContext()).trimMemory(TRIM_MEMORY_COMPLETE); System.gc(); } @Override public boolean onMenuOpened(int featureId, Menu menu) { if (featureId == AppCompatDelegate.FEATURE_SUPPORT_ACTION_BAR && menu != null) stopHandler(); return super.onMenuOpened(featureId, menu); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. 
getMenuInflater().inflate(R.menu.menu_view_pager, menu); return true; } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams( RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT); if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) params.setMargins(0, 0, Measure.getNavigationBarSize(SingleMediaActivity.this).x, 0); else params.setMargins(0, 0, 0, 0); toolbar.setLayoutParams(params); setUpViewPager(); } private boolean favsearch(String path) { boolean favis = false; realm = Realm.getDefaultInstance(); RealmResults<FavouriteImagesModel> realmQuery = realm.where(FavouriteImagesModel.class).findAll(); for (int i = 0; i < realmQuery.size(); i++) { if (realmQuery.get(i).getPath().equals(path)) { favis = true; break; } } return favis; } private void performrealmaction(final ImageDescModel descModel, String newpath) { realm = Realm.getDefaultInstance(); int index = descModel.getId().lastIndexOf("/"); String name = descModel.getId().substring(index + 1); String newpathy = newpath + "/" + name; realm.beginTransaction(); ImageDescModel imageDescModel = realm.createObject(ImageDescModel.class, newpathy); imageDescModel.setTitle(descModel.getTitle()); realm.commitTransaction(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmResults<ImageDescModel> result = realm.where(ImageDescModel.class).equalTo ("path", descModel.getId()).findAll(); result.deleteAllFromRealm(); } }); } private void getdescriptionpaths(String patjs, String newpth) { realm = Realm.getDefaultInstance(); RealmQuery<ImageDescModel> realmQuery = realm.where(ImageDescModel.class); for (int i = 0; i < realmQuery.count(); i++) { if (realmQuery.findAll().get(i).getId().equals(patjs)) { performrealmaction(realmQuery.findAll().get(i), newpth); break; } } } @Override public boolean 
onPrepareOptionsMenu(final Menu menu) { // (signature continues from the previous line)
        // Hide/show menu groups depending on the viewing mode: normal album, all-photos,
        // favourites, upload history or trash bin.
        if (allPhotoMode || favphotomode) {
            menu.findItem(R.id.action_cover).setVisible(false);
        }
        if (!allPhotoMode && !favphotomode && !upoadhis && !trashdis) {
            // Plain album view: every photo-only action is available.
            menu.setGroupVisible(R.id.only_photos_options, true);
        } else if (!allPhotoMode && favphotomode && !upoadhis && !trashdis) {
            // Favourites: copy/rename/move act on the real file and are disabled here.
            menu.findItem(R.id.action_copy).setVisible(false);
            menu.findItem(R.id.rename_photo).setVisible(false);
            menu.findItem(R.id.action_move).setVisible(false);
        } else if (!allPhotoMode && !favphotomode && (upoadhis || trashdis)) {
            // Upload history / trash: only viewing and deleting make sense.
            menu.findItem(R.id.action_copy).setVisible(false);
            menu.findItem(R.id.action_move).setVisible(false);
            menu.findItem(R.id.rename_photo).setVisible(false);
            menu.findItem(R.id.slide_show).setVisible(false);
            menu.findItem(R.id.action_use_as).setVisible(false);
            menu.findItem(R.id.action_cover).setVisible(false);
            menu.findItem(R.id.action_description).setVisible(false);
        }
        if (customUri) {
            // Opened via an external content URI: storage-bound actions are unavailable.
            menu.setGroupVisible(R.id.on_internal_storage, false);
            menu.setGroupVisible(R.id.only_photos_options, false);
            menu.findItem(R.id.sort_action).setVisible(false);
        }
        return true;
    }

    /**
     * Handles the three results this screen can receive: speech-to-text for the description
     * dialog, the SAF tree grant for SD-card writes, and uCrop's edited image.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQ_CODE_SPEECH_INPUT && data != null) {
            // NOTE(review): result may be empty if recognition produced nothing — get(0)
            // would then throw; confirm the recognizer contract before relying on it.
            ArrayList<String> result = data
                    .getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            voiceInput = result.get(0);
            editTextDescription.setText(editTextDescription.getText().toString().trim() + " " + voiceInput);
            editTextDescription.setSelection(editTextDescription.length());
            return;
        }
        if (resultCode == RESULT_OK && requestCode == REQUEST_CODE_SD_CARD_PERMISSIONS) {
            Uri treeUri = data.getData();
            // Persist URI in shared preference so that you can use it later.
            ContentHelper.saveSdCardInfo(getApplicationContext(), treeUri);
            getContentResolver().takePersistableUriPermission(treeUri, Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
        }
        if (data != null && resultCode == RESULT_OK) {
            switch (requestCode) {
                case UCrop.REQUEST_CROP:
                    final Uri imageUri = UCrop.getOutput(data);
                    if (imageUri != null && imageUri.getScheme().equals("file")) {
                        try {
                            //copyFileToDownloads(imageUri);
                            // TODO: 21/08/16 handle this better
                            handleEditorImage(data);
                            if (ContentHelper.copyFile(getApplicationContext(), new File(imageUri.getPath()), new File(getAlbum().getPath()))) {
                                //((ImageFragment) adapter.getRegisteredFragment(getAlbum().getCurrentMediaIndex())).displayMedia(true);
                                SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.new_file_created), bottomBar.getHeight());
                            }
                            //adapter.notifyDataSetChanged();
                        } catch (Exception e) {
                            Log.e("ERROS - uCrop", imageUri.toString(), e);
                        }
                    } else
                        SnackBarHandler.showWithBottomMargin(parentView, "errori random", bottomBar.getHeight());
                    break;
                default:
                    break;
            }
        }
    }

    /**
     * Extracts the edited file path from the editor's result intent (falling back to the
     * original file when nothing was edited) and loads it asynchronously.
     */
    private void handleEditorImage(Intent data) {
        String newFilePath = data.getStringExtra(EditImageActivity.EXTRA_OUTPUT);
        boolean isImageEdit = data.getBooleanExtra(EditImageActivity.IMAGE_IS_EDIT, false);
        if (isImageEdit) {
        } else {//Or use the original unedited pictures
            newFilePath = data.getStringExtra(EditImageActivity.FILE_PATH);
        }
        //System.out.println("newFilePath---->" + newFilePath);
        //File file = new File(newFilePath);
        //System.out.println("newFilePath size ---->" + (file.length() / 1024)+"KB");
        Log.d("image is edit", isImageEdit + "");
        LoadImageTask loadTask = new LoadImageTask();
        loadTask.execute(newFilePath);
    }

    /**
     * Returns to the album list. When reload is false the prefetched-albums flag is passed
     * so the target activity can skip re-scanning.
     */
    private void displayAlbums(boolean reload) {
        Intent i = new Intent(SingleMediaActivity.this, LFMainActivity.class);
        Bundle b = new Bundle();
        b.putInt(SplashScreen.CONTENT, SplashScreen.ALBUMS_PREFETCHED);
        if (!reload) i.putExtras(b);
        startActivity(i);
        finish();
    }

    /**
     * Deletes (or moves to trash, when the trash option is enabled) the currently shown
     * media, dispatching on the active viewing mode. Continues on the following lines.
     */
    private void deleteCurrentMedia() {
        boolean success = false;
        if
(!allPhotoMode && !favphotomode && !upoadhis && !trashdis) { // normal album mode
            if (AlertDialogsHelper.check) {
                // Trash-bin preference enabled: move instead of hard delete.
                success = addToTrash();
            } else {
                success = getAlbum().deleteCurrentMedia(getApplicationContext());
            }
            if (!success) {
                // Deletion failed — most likely an SD card without a persisted SAF grant;
                // ask the user to pick the tree so we gain write permission.
                final AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
                AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, dialogBuilder,
                        R.string.sd_card_write_permission_title, R.string.sd_card_permissions_message, null);
                dialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialogInterface, int i) {
                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
                            startActivityForResult(new Intent(Intent.ACTION_OPEN_DOCUMENT_TREE), REQUEST_CODE_SD_CARD_PERMISSIONS);
                    }
                });
                dialogBuilder.show();
            }
            if (getAlbum().getMedia().size() == 0) {
                // Album emptied: either close (external URI) or go back to the album list.
                if (customUri) finish();
                else {
                    getAlbums().removeCurrentAlbum();
                    displayAlbums(false);
                }
            }
            adapter.notifyDataSetChanged();
            getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
        } else if (allPhotoMode && !favphotomode && !upoadhis && !trashdis) { // "all photos" mode
            int c = current_image_pos;
            if (AlertDialogsHelper.check) {
                success = addToTrash();
            } else {
                deleteMedia(listAll.get(current_image_pos).getPath());
                success = true;
            }
            if (success) {
                LFMainActivity.listAll.remove(current_image_pos);
                size_all = LFMainActivity.listAll.size();
                adapter.notifyDataSetChanged();
                //SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_msg));
            }
            if (current_image_pos != size_all)
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
            // mViewPager.setCurrentItem(current_image_pos);
            // toolbar.setTitle((mViewPager.getCurrentItem() + 1) + " " + getString(R.string.of) + " " + size_all);
        } else if (favphotomode && !allPhotoMode && !upoadhis && !trashdis) { // favourites mode
            int c = current_image_pos;
            //deleteMedia(favouriteslist.get(current_image_pos).getPath());
            // Only the favourite bookmark is removed here — the file itself is untouched.
            realm = Realm.getDefaultInstance();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    RealmResults<FavouriteImagesModel> favouriteImagesModels = realm.where(FavouriteImagesModel
                            .class).equalTo("path", favouriteslist.get(current_image_pos).getPath()).findAll();
                    favouriteImagesModels.deleteAllFromRealm();
                }
            });
            deleteFromList(favouriteslist.get(current_image_pos).getPath());
            size_all = favouriteslist.size();
            if (size_all > 0) {
                adapter.notifyDataSetChanged();
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
                SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
            } else {
                onBackPressed();
            }
        } else if (!favphotomode && !allPhotoMode && upoadhis && !trashdis) { // upload-history mode
            int c = current_image_pos;
            //deleteMedia(favouriteslist.get(current_image_pos).getPath());
            // Local trash-bin copies (under .nomedia) are removed from disk as well.
            if(uploadhistory.get(current_image_pos).getPath().contains(".nomedia")){
                File file = new File(uploadhistory.get(current_image_pos).getPath());
                if(file.exists()){
                    file.delete();
                }
            }
            realm = Realm.getDefaultInstance();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    RealmResults<UploadHistoryRealmModel> uploadHistoryImagesModels = realm.where(UploadHistoryRealmModel
                            .class).equalTo("pathname", uploadhistory.get(current_image_pos).getPath()).findAll();
                    uploadHistoryImagesModels.deleteAllFromRealm();
                }
            });
            deleteFromList(uploadhistory.get(current_image_pos).getPath());
            size_all = uploadhistory.size();
            if (size_all > 0) {
                adapter.notifyDataSetChanged();
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
                SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
            } else {
                onBackPressed();
            }
        } else if (trashdis && !favphotomode && !upoadhis && !allPhotoMode) { // trash-bin mode
            int c = current_image_pos;
            realm = Realm.getDefaultInstance();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    RealmResults<TrashBinRealmModel> trashBinRealmModels = realm.where(TrashBinRealmModel.class).
                            equalTo("trashbinpath", trashbinlistd.get(current_image_pos).getPath()).findAll();
                    trashBinRealmModels.deleteAllFromRealm();
                }
            });
            deleteFromList(trashbinlistd.get(current_image_pos).getPath());
            size_all = trashbinlistd.size();
            if (size_all > 0) {
                adapter.notifyDataSetChanged();
                getSupportActionBar().setTitle((c + 1) + " " + getString(R.string.of) + " " + size_all);
                //SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
            } else {
                onBackPressed();
            }
        }
    }

    /**
     * Records a moved-to-trash file in Realm, keyed by its new path inside the
     * external-storage .nomedia directory, remembering the original path and timestamp.
     * NOTE(review): SimpleDateFormat is created without an explicit Locale — formatting is
     * locale-dependent; confirm whether that matters for later parsing.
     */
    private void addTrashObjectsToRealm(String mediaPath){
        String trashbinpath = Environment.getExternalStorageDirectory() + "/" + ".nomedia";
        realm = Realm.getDefaultInstance();
        realm.beginTransaction();
        String name = mediaPath.substring(mediaPath.lastIndexOf("/") + 1);
        String trashpath = trashbinpath + "/" + name;
        TrashBinRealmModel trashBinRealmModel = realm.createObject(TrashBinRealmModel.class, trashpath);
        trashBinRealmModel.setOldpath(mediaPath);
        trashBinRealmModel.setDatetime(new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date()));
        trashBinRealmModel.setTimeperiod("null");
        realm.commitTransaction();
    }

    /**
     * Removes the first entry with the given path from whichever in-memory list backs the
     * current mode (favourites, upload history or trash bin).
     */
    private void deleteFromList(String path){
        if(favphotomode){
            for (int i = 0; i < favouriteslist.size(); i++){
                if(favouriteslist.get(i).getPath().equals(path)){
                    favouriteslist.remove(i);
                    break;
                }
            }
        } else if(upoadhis){
            for (int i = 0; i < uploadhistory.size(); i++){
                if(uploadhistory.get(i).getPath().equals(path)){
                    uploadhistory.remove(i);
                    break;
                }
            }
        } else if(trashdis){
            for (int i = 0; i < trashbinlistd.size(); i++){
                if(trashbinlistd.get(i).getPath().equals(path)){
                    trashbinlistd.remove(i);
                    break;
                }
            }
        }
    }

    /**
     * Moves the current media into the hidden .nomedia trash directory (creating it on
     * first use), offers an UNDO snackbar, and records the move in Realm.
     * Returns true when the move succeeded. Continues on the following lines.
     */
    private boolean addToTrash(){
        String pathOld = null;
        String oldpath = null;
        int no = 0;
        boolean succ = false;
if(!allPhotoMode && !favphotomode && !upoadhis){ // album mode: current media of the open album
            oldpath = getAlbum().getCurrentMedia().getPath();
        } else if(allPhotoMode && !favphotomode && !upoadhis){ // all-photos mode
            oldpath = listAll.get(current_image_pos).getPath();
        }
        File file = new File(Environment.getExternalStorageDirectory() + "/" + ".nomedia");
        if (file.exists() && file.isDirectory()) {
            // Trash directory already exists — move the file there.
            if (!allPhotoMode && !favphotomode) {
                pathOld = getAlbum().getCurrentMedia().getPath();
                succ = getAlbum().moveCurrentMedia(getApplicationContext(), file.getAbsolutePath());
            } else if (allPhotoMode && !favphotomode){
                pathOld = listAll.get(current_image_pos).getPath();
                succ = getAlbum().moveAnyMedia(getApplicationContext(), file.getAbsolutePath(), listAll.get
                        (current_image_pos).getPath());
            }
            if (succ) {
                // Offer a one-shot UNDO that moves the file back to its original album.
                Snackbar snackbar = SnackBarHandler.showWithBottomMargin2(parentView, getString(R.string
                        .trashbin_move_onefile), navigationView.getHeight
                        (), Snackbar.LENGTH_SHORT);
                final String finalOldpath = oldpath;
                snackbar.setAction("UNDO", new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        getAlbum().moveAnyMedia(getApplicationContext(), getAlbum().getPath(), finalOldpath);
                    }
                });
                snackbar.show();
            } else {
                SnackBarHandler.showWithBottomMargin(parentView, String.valueOf(no) + " " + getString(R.string
                        .trashbin_move_error), navigationView.getHeight
                        ());
            }
        } else {
            // First use: create the trash directory, then move.
            if (file.mkdir()) {
                if (!allPhotoMode && !favphotomode) {
                    pathOld = getAlbum().getCurrentMedia().getPath();
                    succ = getAlbum().moveCurrentMedia(getApplicationContext(), file.getAbsolutePath());
                } else if (allPhotoMode && !favphotomode) {
                    // NOTE(review): this branch records getAlbum().getCurrentMedia().getPath()
                    // as pathOld while the existing-directory branch above records
                    // listAll.get(current_image_pos).getPath() — looks inconsistent; confirm
                    // which path the Realm trash record should point at in all-photos mode.
                    pathOld = getAlbum().getCurrentMedia().getPath();
                    succ = getAlbum().moveAnyMedia(getApplicationContext(), file.getAbsolutePath(), listAll.get
                            (current_image_pos).getPath());
                }
                if (succ) {
                    SnackBarHandler.showWithBottomMargin(parentView, String.valueOf(no) + " " + getString(R.string
                            .trashbin_move_onefile), navigationView.getHeight
                            ());
                } else {
                    SnackBarHandler.showWithBottomMargin(parentView, String.valueOf(no) + " " + getString(R.string
                            .trashbin_move_error), navigationView.getHeight
                            ());
                }
            }
        }
        // NOTE(review): if neither mode branch ran (e.g. favourites mode) or mkdir failed,
        // pathOld is still null and addTrashObjectsToRealm() will NPE in substring() —
        // consider guarding on succ/pathOld != null.
        addTrashObjectsToRealm(pathOld);
        return succ;
    }

    /**
     * Deletes the media row (and hence the file) matching the given absolute path via the
     * MediaStore content provider.
     * NOTE(review): the cursor is not closed on an exception path and query() may return
     * null on some providers — a try/finally or null check would be safer.
     */
    private void deleteMedia(String path) {
        String[] projection = {MediaStore.Images.Media._ID};
        // Match on the file path
        String selection = MediaStore.Images.Media.DATA + " = ?";
        String[] selectionArgs = new String[]{path};
        // Query for the ID of the media matching the file path
        Uri queryUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
        ContentResolver contentResolver = getContentResolver();
        Cursor c = contentResolver.query(queryUri, projection, selection, selectionArgs, null);
        if (c.moveToFirst()) {
            // We found the ID. Deleting the item via the content provider will also remove the file
            long id = c.getLong(c.getColumnIndexOrThrow(MediaStore.Images.Media._ID));
            Uri deleteUri = ContentUris.withAppendedId(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, id);
            contentResolver.delete(deleteUri, null, null);
        }
        c.close();
    }

    /** Removes every favourite bookmark stored in Realm for the given media path. */
    private void deletefav(final String path){
        realm = Realm.getDefaultInstance();
        realm.executeTransaction(new Realm.Transaction() {
            @Override
            public void execute(Realm realm) {
                RealmResults<FavouriteImagesModel> favouriteImagesModels = realm.where(FavouriteImagesModel
                        .class).equalTo("path", path).findAll();
                favouriteImagesModels.deleteAllFromRealm();
            }
        });
    }

    /**
     * Confirmation dialog for removing the current photo from favourites; when delete-protection
     * is on, an additional password dialog gates the removal. Continues on the following lines.
     */
    private void deletefromfav(final MenuItem item){
        String ButtonDelete = "";
        final AlertDialog.Builder deleteDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
        AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.remove_from_favourites, R.string.delete_from_favourites_message, null);
        ButtonDelete = this.getString(R.string.remove);
        deleteDialog.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null);
        deleteDialog.setPositiveButton(ButtonDelete.toUpperCase(), new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int id) {
                if (securityObj.isActiveSecurity() && securityObj.isPasswordOnDelete()) {
                    final
boolean passco[] = {false}; // tracks whether the last password attempt failed
                    final AlertDialog.Builder passwordDialogBuilder =
                            new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
                    final EditText editTextPassword = securityObj.getInsertPasswordDialog
                            (SingleMediaActivity.this, passwordDialogBuilder);
                    editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null));
                    passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(),
                            new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            if (securityObj.checkPassword(editTextPassword.getText().toString())) {
                                //int c = current_image_pos;
                                //deleteMedia(favouriteslist.get(current_image_pos).getPath());
                                item.getIcon().clearColorFilter();
                                deletefav(getAlbum().getCurrentMedia().getPath());
                            } else
                                SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight());
                        }
                    });
                    // Hide the "wrong password" error as soon as the user types again.
                    editTextPassword.addTextChangedListener(new TextWatcher() {
                        @Override
                        public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                            //empty method body
                        }

                        @Override
                        public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                            //empty method body
                        }

                        @Override
                        public void afterTextChanged(Editable editable) {
                            if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){
                                securityObj.getTextInputLayout().setVisibility(View.INVISIBLE);
                            } else{
                                passco[0]=false;
                            }
                        }
                    });
                    passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null);
                    final AlertDialog passwordDialog = passwordDialogBuilder.create();
                    passwordDialog.show();
                    // Keep the soft keyboard open and focused on the password field.
                    passwordDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE|WindowManager
                            .LayoutParams.FLAG_ALT_FOCUSABLE_IM);
                    passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams
                            .SOFT_INPUT_STATE_ALWAYS_VISIBLE);
                    AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE,
                            DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog);
                    // Override the positive button so a wrong password keeps the dialog open.
                    passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View
                            .OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            if (securityObj.checkPassword(editTextPassword.getText().toString())) {
                                // int c = current_image_pos;
                                //deleteMedia(favouriteslist.get(current_image_pos).getPath());
                                passwordDialog.dismiss();
                                item.getIcon().clearColorFilter();
                                SnackBarHandler.show(parentView, getApplicationContext().getString(R
                                        .string.photo_deleted_from_fav_msg));
                                deletefav(getAlbum().getCurrentMedia().getPath());
                            } else {
                                passco[0] = true;
                                securityObj.getTextInputLayout().setVisibility(View.VISIBLE);
                                SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight());
                                editTextPassword.getText().clear();
                                editTextPassword.requestFocus();
                            }
                        }
                    });
                } else{
                    // No password protection: remove straight away.
                    item.getIcon().clearColorFilter();
                    SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
                    //deleteMedia(favouriteslist.get(current_image_pos).getPath());
                    deletefav(getAlbum().getCurrentMedia().getPath());
                }
            }
        });
        AlertDialog alertDialog = deleteDialog.create();
        alertDialog.show();
        // NOTE(review): this snackbar fires as soon as the confirmation dialog opens, i.e.
        // before (and regardless of) the user's choice — looks unintended; confirm.
        SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg));
        AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialog);
    }

    /**
     * Central options-menu dispatcher for the single-media screen: copy, share, edit,
     * use-as, print, rename, favourite, compress, delete, slideshow, move, cover, details,
     * settings, restore-from-trash and description. Continues on the following lines.
     */
    @Override
    public boolean onOptionsItemSelected(final MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                supportFinishAfterTransition();
                return true;
            case R.id.action_copy:
                handler.removeCallbacks(slideShowRunnable);
                bottomSheetDialogFragment = new SelectAlbumBottomSheet();
                bottomSheetDialogFragment.setTitle(getString(R.string.copy_to));
                bottomSheetDialogFragment.setSelectAlbumInterface(new SelectAlbumBottomSheet.SelectAlbumInterface() {
                    @Override
                    public void folderSelected(String path) {
                        // Target file name = current name + current extension.
                        File file = new File(path + "/" + getAlbum().getCurrentMedia().getName() + getAlbum()
.getCurrentMedia().getPath().substring
                                (getAlbum().getCurrentMedia().getPath().lastIndexOf(".")));
                        if (file.exists()) {
                            // Silently refuse to overwrite an existing file in the target album.
                            bottomSheetDialogFragment.dismiss();
                        } else {
                            getAlbum().copyPhoto(getApplicationContext(), getAlbum().getCurrentMedia().getPath(), path);
                            bottomSheetDialogFragment.dismiss();
                            SnackBarHandler.showWithBottomMargin(relativeLayout, getString(R.string.copied_successfully) + " to " + path, bottomBar.getHeight());
                        }
                    }
                });
                bottomSheetDialogFragment.show(getSupportFragmentManager(), bottomSheetDialogFragment.getTag());
                break;
            case R.id.action_share:
                handler.removeCallbacks(slideShowRunnable);
                Intent share = new Intent(SingleMediaActivity.this, SharingActivity.class);
                if (!allPhotoMode)
                    share.putExtra(EXTRA_OUTPUT, getAlbum().getCurrentMedia().getPath());
                else
                    share.putExtra(EXTRA_OUTPUT, listAll.get(current_image_pos).getPath());
                startActivity(share);
                return true;
            case R.id.action_edit:
                handler.removeCallbacks(slideShowRunnable);
                // Resolve the file for the current mode (album / all photos / favourites).
                if (!allPhotoMode && !favphotomode) {
                    uri = Uri.fromFile(new File(getAlbum().getCurrentMedia().getPath()));
                } else if (allPhotoMode && !favphotomode) {
                    uri = Uri.fromFile(new File(listAll.get(current_image_pos).getPath()));
                } else if (!allPhotoMode && favphotomode) {
                    uri = Uri.fromFile(new File(favouriteslist.get(current_image_pos).getPath()));
                }
                final String extension = uri.getPath();
                // GIFs are not editable in the built-in editor.
                if (extension != null && !(extension.substring(extension.lastIndexOf(".")).equals(".gif"))) {
                    Intent editIntent = new Intent(SingleMediaActivity.this, EditImageActivity.class);
                    editIntent.putExtra("extra_input", uri.getPath());
                    editIntent.putExtra("extra_output", FileUtils.genEditFile(FileUtils.getExtension(extension)).getAbsolutePath());
                    editIntent.putExtra("requestCode", ACTION_REQUEST_EDITIMAGE);
                    startActivity(editIntent);
                } else
                    SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.image_invalid), bottomBar.getHeight());
                break;
            case R.id.action_use_as:
                handler.removeCallbacks(slideShowRunnable);
                Intent intent = new
                        Intent(Intent.ACTION_ATTACH_DATA);
                if (!allPhotoMode)
                    intent.setDataAndType(
                            getAlbum().getCurrentMedia().getUri(), getAlbum().getCurrentMedia().getMimeType());
                else
                    intent.setDataAndType(Uri.fromFile(new File(listAll.get(current_image_pos).getPath())),
                            StringUtils.getMimeType(listAll.get(current_image_pos).getPath()));
                startActivity(Intent.createChooser(intent, getString(R.string.use_as)));
                return true;
            case R.id.print:
                PrintHelper photoPrinter = new PrintHelper(this);
                photoPrinter.setScaleMode(PrintHelper.SCALE_MODE_FIT);
                Bitmap bitmap = BitmapFactory.decodeFile(getAlbum().getCurrentMedia().getPath(),
                        new BitmapFactory.Options());
                photoPrinter.printBitmap(getString(R.string.print), bitmap);
                return true;
            case R.id.rename_photo:
                String currentpath = null;
                if (!allPhotoMode) {
                    currentpath = getAlbum().getCurrentMedia().getPath();
                } else {
                    currentpath = listAll.get(current_image_pos).getPath();
                }
                final File file = new File(currentpath);
                // Split the path into base name and extension for the rename dialog.
                int indexofdot = file.getPath().lastIndexOf(".");
                int indert = file.getPath().lastIndexOf("/");
                String namefile = file.getPath().substring(indert + 1, indexofdot);
                final String imageextension = file.getPath().substring(indexofdot + 1);
                AlertDialog.Builder renameDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
                final EditText editTextNewName = new EditText(getApplicationContext());
                editTextNewName.setText(namefile);
                editTextNewName.setSelectAllOnFocus(true);
                editTextNewName.setHint(R.string.description_hint);
                editTextNewName.setHintTextColor(ContextCompat.getColor(getApplicationContext(), R.color.grey));
                editTextNewName.setHighlightColor(ContextCompat.getColor(getApplicationContext(), R.color.cardview_shadow_start_color));
                editTextNewName.selectAll();
                editTextNewName.setSingleLine(false);
                AlertDialogsHelper.getInsertTextDialog(SingleMediaActivity.this, renameDialogBuilder,
                        editTextNewName, R.string.rename_image, null);
                renameDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(),
null);
                renameDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        //This should br empty it will be overwrite later
                        //to avoid dismiss of the dialog
                    }
                });
                final AlertDialog renameDialog = renameDialogBuilder.create();
                renameDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_IS_FORWARD_NAVIGATION);
                editTextNewName.setSelection(editTextNewName.getText().toString().length());
                renameDialog.show();
                AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface
                        .BUTTON_NEGATIVE}, getAccentColor(), renameDialog);
                // OK starts disabled (greyed out) until the user types a non-empty name.
                renameDialog.getButton(AlertDialog.BUTTON_POSITIVE).setEnabled(false);
                AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, ContextCompat
                        .getColor(SingleMediaActivity.this, R.color.grey), renameDialog);
                editTextNewName.addTextChangedListener(new TextWatcher() {
                    @Override
                    public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                        //empty method body
                    }

                    @Override
                    public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                        //empty method body
                    }

                    @Override
                    public void afterTextChanged(Editable editable) {
                        if (TextUtils.isEmpty(editable)) {
                            // Disable ok button
                            renameDialog.getButton(
                                    AlertDialog.BUTTON_POSITIVE).setEnabled(false);
                            AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE},
                                    ContextCompat.getColor(SingleMediaActivity.this, R.color.grey), renameDialog);
                        } else {
                            // Something into edit text. Enable the button.
                            renameDialog.getButton(
                                    AlertDialog.BUTTON_POSITIVE).setEnabled(true);
                            AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE},
                                    getAccentColor(), renameDialog);
                        }
                    }
                });
                renameDialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View dialog) {
                        if (editTextNewName.length() != 0) {
                            int index = file.getPath().lastIndexOf("/");
                            String path = file.getPath().substring(0, index);
                            File newname = new File(path + "/" + editTextNewName.getText().toString() + "." + imageextension);
                            if (file.renameTo(newname)) {
                                // Drop the stale MediaStore row and ask the scanner to index the new file.
                                ContentResolver resolver = getApplicationContext().getContentResolver();
                                resolver.delete(
                                        MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                                        MediaStore.Images.Media.DATA + "=?",
                                        new String[]{file.getAbsolutePath()});
                                Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
                                intent.setData(Uri.fromFile(newname));
                                getApplicationContext().sendBroadcast(intent);
                            }
                            // NOTE(review): the in-memory path is updated even when renameTo()
                            // returned false — confirm whether that is intended.
                            if (!allPhotoMode) {
                                int a = getAlbum().getCurrentMediaIndex();
                                getAlbum().getMedia(a).setPath(newname.getPath());
                            } else {
                                listAll.get(current_image_pos).setPath(newname.getPath());
                            }
                            renameDialog.dismiss();
                            SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.rename_succes), navigationView
                                    .getHeight());
                        }
                    }
                });
                return true;
            case R.id.action_favourites:
                // Toggle: add to favourites when absent, otherwise ask to remove.
                realm = Realm.getDefaultInstance();
                String realpath = getAlbum().getCurrentMedia().getPath();
                RealmQuery<FavouriteImagesModel> query = realm.where(FavouriteImagesModel.class).equalTo("path", realpath);
                if (query.count() == 0) {
                    realm.beginTransaction();
                    fav = realm.createObject(FavouriteImagesModel.class, realpath);
                    // Carry over any stored description onto the favourite entry.
                    ImageDescModel q = realm.where(ImageDescModel.class).equalTo("path", realpath).findFirst();
                    if (q != null) {
                        fav.setDescription(q.getTitle());
                    } else {
                        fav.setDescription(" ");
                    }
                    item.getIcon().setColorFilter(getAccentColor(), PorterDuff.Mode.SRC_IN);
                    realm.commitTransaction();
                    SnackBarHandler.showWithBottomMargin(parentView,
getString(R.string.add_favourite), bottomBar.getHeight());
                } else {
                    deletefromfav(item);
                }
                break;
            case R.id.action_compress:
                handler.removeCallbacks(slideShowRunnable);
                if (!allPhotoMode)
                    uri = Uri.fromFile(new File(getAlbum().getCurrentMedia().getPath()));
                else
                    uri = Uri.fromFile(new File(listAll.get(current_image_pos).getPath()));
                String extension1 = uri.getPath();
                // GIFs cannot be compressed by the compressor screen.
                if (extension1 != null && !(extension1.substring(extension1.lastIndexOf(".")).equals(".gif"))) {
                    Intent compressIntent = new Intent(SingleMediaActivity.this, CompressImageActivity.class);
                    if (!allPhotoMode)
                        compressIntent.putExtra(EXTRA_OUTPUT, getAlbum().getCurrentMedia().getPath());
                    else
                        compressIntent.putExtra(EXTRA_OUTPUT, listAll.get(current_image_pos).getPath());
                    startActivity(compressIntent);
                    //to send the resolution of image
                    handler.removeCallbacks(slideShowRunnable);
                    if (!allPhotoMode && !favphotomode) {
                        mediacompress = getAlbum().getCurrentMedia();
                    } else if (allPhotoMode && !favphotomode) {
                        mediacompress = new Media(new File(listAll.get(current_image_pos).getPath()));
                    } else if (!allPhotoMode && favphotomode) {
                        mediacompress = new Media(new File(favouriteslist.get(current_image_pos).getPath()));
                    }
                } else
                    SnackBarHandler.show(parentView, R.string.image_invalid);
                break;
            case R.id.action_delete:
                String ButtonDelete = "";
                handler.removeCallbacks(slideShowRunnable);
                deleteaction(ButtonDelete);
                return true;
            case R.id.slide_show:
                handler.removeCallbacks(slideShowRunnable);
                setSlideShowDialog();
                return true;
            case R.id.action_move:
                final String pathcurrent = getAlbum().getCurrentMedia().getPath();
                handler.removeCallbacks(slideShowRunnable);
                bottomSheetDialogFragment = new SelectAlbumBottomSheet();
                bottomSheetDialogFragment.setTitle(getString(R.string.move_to));
                bottomSheetDialogFragment.setSelectAlbumInterface(new SelectAlbumBottomSheet.SelectAlbumInterface() {
                    @Override
                    public void folderSelected(String path) {
                        getAlbum().moveCurrentMedia(getApplicationContext(), path);
                        getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size());
                        if (getAlbum().getMedia().size() == 0) {
                            // Album emptied by the move: close or fall back to the album list.
                            if (customUri) finish();
                            else {
                                getAlbums().removeCurrentAlbum();
                                displayAlbums(false);
                            }
                        }
                        adapter.notifyDataSetChanged();
                        // Migrate any stored description to the new location.
                        getdescriptionpaths(pathcurrent, path);
                        // toolbar.setTitle((mViewPager.getCurrentItem() + 1) + " " + getString(R.string.of) + " " + getAlbum().getCount());
                        bottomSheetDialogFragment.dismiss();
                        SnackBarHandler.showWithBottomMargin(relativeLayout, getString(R.string.photo_moved_successfully) + " to " + path, bottomBar.getHeight());
                    }
                });
                bottomSheetDialogFragment.show(getSupportFragmentManager(), bottomSheetDialogFragment.getTag());
                return true;
            case R.id.action_cover:
                // Use the current photo as the album's cover image.
                AlbumSettings albumSettings = AlbumSettings.getSettings(getApplicationContext(), getAlbum());
                albumSettings.changeCoverPath(getApplicationContext(), getAlbum().getCurrentMedia().getPath());
                SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.change_cover), bottomBar.getHeight());
                return true;
            case R.id.action_details:
                Media media = null;
                handler.removeCallbacks(slideShowRunnable);
                details = true;
                displaydetails(media);
                toggleSystemUI();
                viewSwitcher.showNext();
                break;
            case R.id.action_settings:
                handler.removeCallbacks(slideShowRunnable);
                startActivity(new Intent(getApplicationContext(), SettingsActivity.class));
                break;
            case R.id.restore_action:
                // Restore the current trash-bin entry, optionally behind a password check.
                String button = "";
                final AlertDialog.Builder deleteDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
                AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.restore, R
                        .string.restore_image, null);
                button = this.getString(R.string.restore);
                deleteDialog.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null);
                deleteDialog.setPositiveButton(button.toUpperCase(), new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        if
(securityObj.isActiveSecurity() && securityObj.isPasswordOnDelete()) {
                            final boolean passco[] = {false}; // last-attempt-failed flag
                            final AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
                            final EditText editTextPassword = securityObj.getInsertPasswordDialog
                                    (SingleMediaActivity.this, passwordDialogBuilder);
                            editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null));
                            passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(),
                                    new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                    if (securityObj.checkPassword(editTextPassword.getText().toString())) {
                                        restoreImage(trashbinlistd.get(current_image_pos).getPath());
                                    } else
                                        SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight());
                                }
                            });
                            // Hide the "wrong password" error as soon as the user types again.
                            editTextPassword.addTextChangedListener(new TextWatcher() {
                                @Override
                                public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                                    //empty method body
                                }

                                @Override
                                public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                                    //empty method body
                                }

                                @Override
                                public void afterTextChanged(Editable editable) {
                                    if (securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]) {
                                        securityObj.getTextInputLayout().setVisibility(View.INVISIBLE);
                                    } else {
                                        passco[0] = false;
                                    }
                                }
                            });
                            passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null);
                            final AlertDialog passwordDialog = passwordDialogBuilder.create();
                            passwordDialog.show();
                            // Keep the keyboard open and focused on the password field.
                            passwordDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager
                                    .LayoutParams.FLAG_ALT_FOCUSABLE_IM);
                            passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams
                                    .SOFT_INPUT_STATE_ALWAYS_VISIBLE);
                            AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE,
                                    DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog);
                            // Override the positive button so a wrong password keeps the dialog open.
                            passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View
                                    .OnClickListener() {
                                @Override
                                public void onClick(View v) {
                                    if (securityObj.checkPassword(editTextPassword.getText().toString())) {
                                        restoreImage(trashbinlistd.get(current_image_pos).getPath());
                                        passwordDialog.dismiss();
                                    } else {
                                        passco[0] = true;
                                        securityObj.getTextInputLayout().setVisibility(View.VISIBLE);
                                        SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight());
                                        editTextPassword.getText().clear();
                                        editTextPassword.requestFocus();
                                    }
                                }
                            });
                        } else
                            restoreImage(trashbinlistd.get(current_image_pos).getPath());
                    }
                });
                AlertDialog alertDialog = deleteDialog.create();
                alertDialog.show();
                AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE,
                        DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialog);
                return true;
            case R.id.action_description:
                // Add / update / delete the free-text description stored for this photo.
                handler.removeCallbacks(slideShowRunnable);
                AlertDialog.Builder descriptionDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle());
                editTextDescription = getDescriptionDialog(SingleMediaActivity.this, descriptionDialogBuilder);
                editTextDescription.setSelectAllOnFocus(true);
                editTextDescription.setHighlightColor(ContextCompat.getColor(getApplicationContext(), R.color
                        .cardview_shadow_start_color));
                editTextDescription.selectAll();
                editTextDescription.setSingleLine(false);
                editTextDescription.setHintTextColor(getResources().getColor(R.color.grey, null));
                descriptionDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null);
                // Positive button says "update" when a description already exists.
                descriptionDialogBuilder.setPositiveButton((temp != null && temp.getTitle().length() != 0) ?
getString(R.string.update_action) : getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This should br empty it will be overwrite later } }); descriptionDialogBuilder.setNeutralButton(getString(R.string.delete).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { //This will be overwrite later } }); final AlertDialog descriptionDialog = descriptionDialogBuilder.create(); descriptionDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface .BUTTON_NEGATIVE}, getAccentColor(), descriptionDialog); descriptionDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE|WindowManager .LayoutParams.FLAG_ALT_FOCUSABLE_IM); descriptionDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE); descriptionDialog.getButton(AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getColor(R.color.grey), descriptionDialog); if(temp == null){ descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setTextColor(getColor(R.color.grey)); descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setEnabled(false); } else { descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setTextColor(getAccentColor()); descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setEnabled(true); } editTextDescription.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if (TextUtils.isEmpty(editable)) { // Disable ok button descriptionDialog.getButton( 
AlertDialog.BUTTON_POSITIVE).setEnabled(false); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getColor(R.color.grey), descriptionDialog); } else { // Something into edit text. Enable the button. descriptionDialog.getButton( AlertDialog.BUTTON_POSITIVE).setEnabled(true); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getAccentColor(), descriptionDialog); } } }); descriptionDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { descriptionDialog.dismiss(); voiceInput = editTextDescription.getText().toString(); if (temp == null) { databaseHelper.addImageDesc(new ImageDescModel(pathForDescription, editTextDescription.getText().toString())); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.description_saved), bottomBar.getHeight()); } else { databaseHelper.update(new ImageDescModel(pathForDescription, editTextDescription.getText().toString())); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.description_updated), bottomBar.getHeight()); } } }); descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (temp == null){ descriptionDialog.getButton(DialogInterface.BUTTON_NEUTRAL).setEnabled(false); } else{ descriptionDialog.dismiss(); databaseHelper.delete(temp); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.description_deleted), bottomBar.getHeight()); } } }); break; default: // If we got here, the user's action was not recognized. // Invoke the superclass to handle it. //return super.onOptionsItemSelected(item); } return super.onOptionsItemSelected(item); } private void restoreImage(String path){ realm = Realm.getDefaultInstance(); RealmResults<TrashBinRealmModel> trashBinRealmModels = realm.where(TrashBinRealmModel.class). 
equalTo("trashbinpath", path).findAll(); String oldpath = trashBinRealmModels.get(0).getOldpath(); String oldFolder = oldpath.substring(0, oldpath.lastIndexOf("/")); if(restoreMove(context, trashBinRealmModels.get(0).getTrashbinpath(), oldFolder)){ scanFile(context, new String[]{ trashBinRealmModels.get(0).getTrashbinpath(), StringUtils.getPhotoPathMoved (trashBinRealmModels.get(0).getTrashbinpath(), oldFolder) }); if( removeFromRealm(trashBinRealmModels.get(0).getTrashbinpath())){ deleteFromList(trashbinlistd.get(current_image_pos).getPath()); size_all = trashbinlistd.size(); if (size_all > 0) { adapter.notifyDataSetChanged(); getSupportActionBar().setTitle((current_image_pos + 1) + " " + getString(R.string.of) + " " + size_all); //SnackBarHandler.show(parentView, getApplicationContext().getString(R.string.photo_deleted_from_fav_msg)); } else { onBackPressed(); } } } } public void scanFile(Context context, String[] path) { MediaScannerConnection.scanFile(context, path, null, null); } private boolean restoreMove(Context context, String source, String targetDir){ File from = new File(source); File to = new File(targetDir); return ContentHelper.moveFile(context, from, to); } private boolean removeFromRealm(final String path){ final boolean[] delete = {false}; Realm realm = Realm.getDefaultInstance(); realm.executeTransaction(new Realm.Transaction() { @Override public void execute(Realm realm) { RealmResults<TrashBinRealmModel> result = realm.where(TrashBinRealmModel.class).equalTo ("trashbinpath", path).findAll(); delete[0] = result.deleteAllFromRealm(); } }); return delete[0]; } public EditText getDescriptionDialog(final ThemedActivity activity, AlertDialog.Builder descriptionDialog) { final View DescriptiondDialogLayout = activity.getLayoutInflater().inflate(R.layout.dialog_description, null); final TextView DescriptionDialogTitle = (TextView) DescriptiondDialogLayout.findViewById(R.id.description_dialog_title); final CardView DescriptionDialogCard = (CardView) 
DescriptiondDialogLayout.findViewById(R.id.description_dialog_card); EditText editxtDescription = (EditText) DescriptiondDialogLayout.findViewById(R.id.description_edittxt); ImageButton VoiceRecognition = (ImageButton) DescriptiondDialogLayout.findViewById(R.id.voice_input); VoiceRecognition.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { promptSpeechInput(SingleMediaActivity.this, REQ_CODE_SPEECH_INPUT, parentView, getString(R.string.speech_prompt)); } }); DescriptionDialogTitle.setBackgroundColor(activity.getPrimaryColor()); DescriptionDialogCard.setBackgroundColor(activity.getCardBackgroundColor()); ThemeHelper.setCursorDrawableColor(editxtDescription, activity.getTextColor()); editxtDescription.getBackground().mutate().setColorFilter(activity.getTextColor(), PorterDuff.Mode.SRC_ATOP); editxtDescription.setTextColor(activity.getTextColor()); realm = Realm.getDefaultInstance(); databaseHelper = new DatabaseHelper(realm); temp = databaseHelper.getImageDesc(pathForDescription); if (temp != null && temp.getTitle().length() != 0) { editxtDescription.setText(temp.getTitle()); editxtDescription.setSelection(editxtDescription.getText().length()); //Toast.makeText(SingleMediaActivity.this, voiceInput, Toast.LENGTH_SHORT).show(); } descriptionDialog.setView(DescriptiondDialogLayout); return editxtDescription; } private void updateBrightness(float level) { WindowManager.LayoutParams lp = getWindow().getAttributes(); lp.screenBrightness = level; getWindow().setAttributes(lp); } @SuppressWarnings("ResourceAsColor") private UCrop.Options getUcropOptions() { UCrop.Options options = new UCrop.Options(); options.setCompressionFormat(Bitmap.CompressFormat.PNG); options.setCompressionQuality(90); options.setActiveWidgetColor(getAccentColor()); options.setToolbarColor(getPrimaryColor()); options.setStatusBarColor(isTranslucentStatusBar() ? 
ColorPalette.getObscuredColor(getPrimaryColor()) : getPrimaryColor()); options.setCropFrameColor(getAccentColor()); options.setFreeStyleCropEnabled(true); return options; } @Override public void setNavBarColor() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { if (isApplyThemeOnImgAct()) if (isNavigationBarColored()) getWindow().setNavigationBarColor(ColorPalette.getTransparentColor(ColorPalette.getObscuredColor(getPrimaryColor()), getTransparency())); else getWindow().setNavigationBarColor(ColorPalette.getTransparentColor(ContextCompat.getColor(getApplicationContext(), R.color.md_black_1000), getTransparency())); else getWindow().setNavigationBarColor(ColorPalette.getTransparentColor(ContextCompat.getColor(getApplicationContext(), R.color.md_black_1000), 175)); } } @Override protected void setStatusBarColor() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { if (isApplyThemeOnImgAct()) if (isTranslucentStatusBar() && isTransparencyZero()){ getWindow().setStatusBarColor(ColorPalette.getTransparentColor(getPrimaryColor(), getTransparency())); } else{ getWindow().setStatusBarColor(ColorPalette.getObscuredColor(getPrimaryColor())); } else getWindow().setStatusBarColor(ColorPalette.getTransparentColor( ContextCompat.getColor(getApplicationContext(), R.color.md_black_1000), 175)); } } @Override protected void onSaveInstanceState(@NonNull Bundle outState) { /* if (mViewPager != null) { outState.putBoolean(ISLOCKED_ARG, mViewPager.isLocked()); }*/ super.onSaveInstanceState(outState); } public void toggleSystemUI() { if (fullScreenMode) showSystemUI(); else hideSystemUI(); } private void hideSystemUI() { runOnUiThread(new Runnable() { public void run() { toolbar.animate().translationY(-toolbar.getHeight()).setInterpolator(new AccelerateInterpolator()) .setDuration(200).start(); bottomBar.animate().translationY(+bottomBar.getHeight()).setInterpolator(new AccelerateInterpolator()) .setDuration(200).start(); 
getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide nav bar | View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY | View.SYSTEM_UI_FLAG_IMMERSIVE); fullScreenMode = true; changeBackGroundColor(); stopHandler(); //removing any runnable from the message queue } }); } private void setupSystemUI() { toolbar.animate().translationY(Measure.getStatusBarHeight(getResources())).setInterpolator(new DecelerateInterpolator()) .setDuration(0).start(); getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN); } private void showSystemUI() { runOnUiThread(new Runnable() { public void run() { toolbar.animate().translationY(Measure.getStatusBarHeight(getResources())).setInterpolator(new DecelerateInterpolator()) .setDuration(240).start(); bottomBar.animate().translationY(0).setInterpolator(new DecelerateInterpolator()).start(); getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN); fullScreenMode = false; changeBackGroundColor(); } }); } private void deleteaction(String ButtonDelete){ final AlertDialog.Builder deleteDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); if(favphotomode){ AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.remove_from_favourites, R.string.delete_from_favourites_message, null); ButtonDelete = this.getString(R.string.remove); }else if(!favphotomode && !upoadhis && !trashdis) { AlertDialogsHelper.getTextCheckboxDialog(SingleMediaActivity.this, deleteDialog, R.string.delete, R .string.delete_photo_message, null, "Move to TrashBin", getAccentColor()); ButtonDelete = 
this.getString(R.string.delete); }else if(upoadhis && !favphotomode && !trashdis){ AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.delete, R .string.delete_photo_message, null); ButtonDelete = this.getString(R.string.delete); }else if(trashdis && !upoadhis && !favphotomode){ AlertDialogsHelper.getTextDialog(SingleMediaActivity.this, deleteDialog, R.string.delete, R .string.delete_image_bin, null); ButtonDelete = this.getString(R.string.delete); } deleteDialog.setNegativeButton(this.getString(R.string.cancel).toUpperCase(), null); deleteDialog.setPositiveButton(ButtonDelete.toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { if (securityObj.isActiveSecurity() && securityObj.isPasswordOnDelete()) { final boolean passco[] = {false}; final AlertDialog.Builder passwordDialogBuilder = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); final EditText editTextPassword = securityObj.getInsertPasswordDialog (SingleMediaActivity.this, passwordDialogBuilder); editTextPassword.setHintTextColor(getResources().getColor(R.color.grey, null)); passwordDialogBuilder.setPositiveButton(getString(R.string.ok_action).toUpperCase(), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { deleteCurrentMedia(); } else SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight()); } }); editTextPassword.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) { //empty method body } @Override public void afterTextChanged(Editable editable) { if(securityObj.getTextInputLayout().getVisibility() == View.VISIBLE && !passco[0]){ 
securityObj.getTextInputLayout().setVisibility(View.INVISIBLE); } else{ passco[0]=false; } } }); passwordDialogBuilder.setNegativeButton(getString(R.string.cancel).toUpperCase(), null); final AlertDialog passwordDialog = passwordDialogBuilder.create(); passwordDialog.show(); passwordDialog.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE|WindowManager .LayoutParams.FLAG_ALT_FOCUSABLE_IM); passwordDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams .SOFT_INPUT_STATE_ALWAYS_VISIBLE); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), passwordDialog); passwordDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(new View .OnClickListener() { @Override public void onClick(View v) { if (securityObj.checkPassword(editTextPassword.getText().toString())) { deleteCurrentMedia(); passwordDialog.dismiss(); } else { passco[0] = true; securityObj.getTextInputLayout().setVisibility(View.VISIBLE); SnackBarHandler.showWithBottomMargin(parentView, getString(R.string.wrong_password), bottomBar.getHeight()); editTextPassword.getText().clear(); editTextPassword.requestFocus(); } } }); } else deleteCurrentMedia(); } }); AlertDialog alertDialog = deleteDialog.create(); alertDialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE, DialogInterface.BUTTON_NEGATIVE}, getAccentColor(), alertDialog); } private void displaydetails(Media media){ final View v = findViewById(R.id.layout_image_description); LinearLayout linearLayout = (LinearLayout)v; if(!allPhotoMode && !favphotomode && !upoadhis){ media = getAlbum().getCurrentMedia(); }else if(allPhotoMode && !favphotomode && !upoadhis){ media = new Media(new File(listAll.get(current_image_pos).getPath())); }else if(!allPhotoMode && favphotomode && !upoadhis){ media = new Media(new File(favouriteslist.get(current_image_pos).getPath())); }else if(!favphotomode && !allPhotoMode && 
upoadhis){ media = new Media(new File(uploadhistory.get(current_image_pos).getPath())); } final MediaDetailsMap<String,String> mediaDetailsMap = media.getMainDetails(this); LinearLayout linearLayout1 = (LinearLayout) findViewById(R.id.image_desc_top); linearLayout1.setBackgroundColor(getPrimaryColor()); v.setBackgroundColor(getBackgroundColor()); int textColor = getBaseTheme() != ThemeHelper.LIGHT_THEME ? Color.parseColor("#FAFAFA" ): Color .parseColor("#455A64"); /* Getting all the viewgroups and views of the image description layout */ TextView imgDate = (TextView) linearLayout.findViewById(R.id.image_desc_date); imgDate.setTextColor(textColor); TextView imgLocation = (TextView) linearLayout.findViewById(R.id.image_desc_loc); imgLocation.setTextColor(textColor); TextView imgTitle = (TextView) linearLayout.findViewById(R.id.image_desc_title); imgTitle.setTextColor(textColor); TextView imgType = (TextView) linearLayout.findViewById(R.id.image_desc_type); imgType.setTextColor(textColor); TextView imgSize = (TextView) linearLayout.findViewById(R.id.image_desc_size); imgSize.setTextColor(textColor); TextView imgResolution = (TextView) linearLayout.findViewById(R.id.image_desc_res); imgResolution.setTextColor(textColor); TextView imgPath = (TextView) linearLayout.findViewById(R.id.image_desc_path); imgPath.setTextColor(textColor); TextView imgOrientation = (TextView) linearLayout.findViewById(R.id.image_desc_orientation); imgOrientation.setTextColor(textColor); TextView imgExif = (TextView) linearLayout.findViewById(R.id.image_desc_exif); imgExif.setTextColor(textColor); TextView imgDesc = (TextView) linearLayout.findViewById(R.id.image_desc); imgDesc.setTextColor(textColor); IconicsImageView iconicsImageView = (IconicsImageView) linearLayout.findViewById(R.id.date_icon); iconicsImageView.setColor(textColor); IconicsImageView locationicon = (IconicsImageView) linearLayout.findViewById(R.id.loca_icon); locationicon.setColor(textColor); IconicsImageView detailsicon = 
(IconicsImageView) linearLayout.findViewById(R.id.detail_icon); detailsicon.setColor(textColor); ImageButton imgBack = (ImageButton) linearLayout.findViewById(R.id.img_desc_back_arrow); imgBack.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { viewSwitcher.showPrevious(); details = false; toggleSystemUI(); } }); /*Setting the label text colours*/ TextView datelabel = (TextView) linearLayout.findViewById(R.id.date_label); datelabel.setTextColor(textColor); TextView locationlabel = (TextView) linearLayout.findViewById(R.id.location_label); locationlabel.setTextColor(textColor); TextView detaillabel = (TextView) linearLayout.findViewById(R.id.details_label); detaillabel.setTextColor(textColor); TextView titlelabel = (TextView) linearLayout.findViewById(R.id.title_label); titlelabel.setTextColor(textColor); TextView typelabel = (TextView) linearLayout.findViewById(R.id.type_label); typelabel.setTextColor(textColor); TextView sizelabel = (TextView) linearLayout.findViewById(R.id.size_label); sizelabel.setTextColor(textColor); TextView reslabel = (TextView) linearLayout.findViewById(R.id.resolution_label); reslabel.setTextColor(textColor); TextView pathlabel = (TextView) linearLayout.findViewById(R.id.path_label); pathlabel.setTextColor(textColor); TextView orientationlabel = (TextView) linearLayout.findViewById(R.id.orientation_label); orientationlabel.setTextColor(textColor); TextView exiflabel = (TextView) linearLayout.findViewById(R.id.exif_label); exiflabel.setTextColor(textColor); TextView desclabel = (TextView) linearLayout.findViewById(R.id.description_label); desclabel.setTextColor(textColor); /*Setting the values to all the textViews*/ try { imgDate.setText(mediaDetailsMap.get("Date").toString()); imgTitle.setText(media.getName()); imgType.setText(mediaDetailsMap.get("Type").toUpperCase()); imgSize.setText(StringUtils.humanReadableByteCount(media.getSize(), true)); imgResolution.setText(mediaDetailsMap.get("Resolution")); 
if(mediaDetailsMap.get("Path").toString().contains(".nomedia")){ imgPath.setText(R.string.deleted_share_image); } else { imgPath.setText(mediaDetailsMap.get("Path").toString()); } imgOrientation.setText(mediaDetailsMap.get("Orientation")); if(mediaDetailsMap.get("Description") == null) { imgDesc.setText(R.string.no_description); } else{ imgDesc.setText(mediaDetailsMap.get("Description")); } if(mediaDetailsMap.get("EXIF") == null){ imgExif.setText(R.string.no_exif_data); } else { imgExif.setText(mediaDetailsMap.get("EXIF")); } if(mediaDetailsMap.get("Location") == null){ imgLocation.setText(R.string.no_location); } else{ imgLocation.setText(mediaDetailsMap.get("Location").toString()); imgLocation.setTextColor(getResources().getColor(R.color.accent_orange, null)); } } catch (Exception e){ //Raised if null values is found, no need to handle } imgLocation.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if(mediaDetailsMap.get("Location")!=null){ Uri gmmIntentUri = Uri.parse("geo:0,0?q="+ mediaDetailsMap.get("Location")); Intent mapIntent = new Intent(Intent.ACTION_VIEW, gmmIntentUri); mapIntent.setPackage("com.google.android.apps.maps"); startActivity(mapIntent); } } }); } private void changeBackGroundColor() { int colorTo; int colorFrom; if (fullScreenMode) { colorFrom = getBackgroundColor(); colorTo = (ContextCompat.getColor(SingleMediaActivity.this, R.color.md_black_1000)); } else { colorFrom = (ContextCompat.getColor(SingleMediaActivity.this, R.color.md_black_1000)); colorTo = getBackgroundColor(); } ValueAnimator colorAnimation = ValueAnimator.ofObject(new ArgbEvaluator(), colorFrom, colorTo); colorAnimation.setDuration(240); colorAnimation.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animator) { ActivityBackground.setBackgroundColor((Integer) animator.getAnimatedValue()); } }); colorAnimation.start(); } @Override public void onBackPressed() { if 
(details) { viewSwitcher.showPrevious(); toggleSystemUI(); details = false; } else super.onBackPressed(); } @Override public void onPause() { super.onPause(); if (isFinishing()){ overridePendingTransition(0, R.anim.media_zoom_out); } } @Override public void singleTap() { toggleSystemUI(); if(slideshow) { handler.removeCallbacks(slideShowRunnable); slideshow=false; } } @Override public void startPostponedTransition() { getWindow().setSharedElementEnterTransition(new ChangeBounds().setDuration(300)); startPostponedEnterTransition(); } private void setSlideShowDialog() { final AlertDialog.Builder slideshowDialog = new AlertDialog.Builder(SingleMediaActivity.this, getDialogStyle()); final View SlideshowDialogLayout = getLayoutInflater().inflate(R.layout.dialog_slideshow, null); final TextView slideshowDialogTitle = (TextView) SlideshowDialogLayout.findViewById(R.id.slideshow_dialog_title); final CardView slideshowDialogCard = (CardView) SlideshowDialogLayout.findViewById(R.id.slideshow_dialog_card); final EditText editTextTimeInterval = (EditText) SlideshowDialogLayout.findViewById(R.id.slideshow_edittext); slideshowDialogTitle.setBackgroundColor(getPrimaryColor()); slideshowDialogCard.setBackgroundColor(getCardBackgroundColor()); editTextTimeInterval.getBackground().mutate().setColorFilter(getTextColor(), PorterDuff.Mode.SRC_ATOP); editTextTimeInterval.setTextColor(getTextColor()); editTextTimeInterval.setHintTextColor(getSubTextColor()); setCursorDrawableColor(editTextTimeInterval, getTextColor()); slideshowDialog.setView(SlideshowDialogLayout); AlertDialog dialog = slideshowDialog.create(); dialog.setButton(DialogInterface.BUTTON_POSITIVE, getString(R.string.ok).toUpperCase(), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { String value= editTextTimeInterval.getText().toString(); if(!"".equals(value)) { slideshow=true; int intValue = Integer.parseInt(value); SLIDE_SHOW_INTERVAL = intValue * 1000; 
if(SLIDE_SHOW_INTERVAL > 1000) { hideSystemUI(); handler.postDelayed(slideShowRunnable, SLIDE_SHOW_INTERVAL); } else Toast.makeText(SingleMediaActivity.this, "Minimum duration is 2 sec", Toast.LENGTH_SHORT).show(); } } }); dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); dialog.show(); AlertDialogsHelper.setButtonTextColor(new int[]{DialogInterface.BUTTON_POSITIVE}, getAccentColor(), dialog); } @Override protected void onDestroy() { super.onDestroy(); handler.removeCallbacks(slideShowRunnable); } private final class LoadImageTask extends AsyncTask<String, Void, Bitmap> { @Override protected Bitmap doInBackground(String... params) { return BitmapUtils.getSampledBitmap(params[0], imageWidth / 4, imageHeight / 4); } @Override protected void onCancelled() { super.onCancelled(); } @TargetApi(Build.VERSION_CODES.HONEYCOMB) @Override protected void onCancelled(Bitmap result) { super.onCancelled(result); } @Override protected void onPreExecute() { super.onPreExecute(); } @Override protected void onPostExecute(Bitmap result) { super.onPostExecute(result); if (mainBitmap != null) { mainBitmap.recycle(); mainBitmap = null; System.gc(); } mainBitmap = result; imgView.setImageBitmap(mainBitmap); } } private void setUpViewPager() { BasicCallBack basicCallBack = new BasicCallBack() { @Override public void callBack(int status, Object data) { toggleSystemUI(); } }; if (!allPhotoMode && !favphotomode) { adapter = new ImageAdapter(getAlbum().getMedia(), basicCallBack, this, this); getSupportActionBar().setTitle((getAlbum().getCurrentMediaIndex() + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size()); mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() { @Override public void onPageChanged(int oldPosition, int position) { getAlbum().setCurrentPhotoIndex(position); toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + getAlbum().getMedia().size()); invalidateOptionsMenu(); 
pathForDescription = getAlbum().getMedia().get(position).getPath(); } }); mViewPager.scrollToPosition(getAlbum().getCurrentMediaIndex()); } else if(allPhotoMode && !favphotomode){ adapter = new ImageAdapter(LFMainActivity.listAll, basicCallBack, this, this); getSupportActionBar().setTitle(current_image_pos + 1 + " " + getString(R.string.of) + " " + size_all); mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() { @Override public void onPageChanged(int oldPosition, int position) { current_image_pos = position; getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex()); toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all); invalidateOptionsMenu(); pathForDescription = listAll.get(position).getPath(); } }); mViewPager.scrollToPosition(current_image_pos); } else if(!allPhotoMode && favphotomode){ adapter = new ImageAdapter(favouriteslist, basicCallBack, this, this); getSupportActionBar().setTitle(current_image_pos + 1 + " " + getString(R.string.of) + " " + size_all); mViewPager.setOnPageChangeListener(new PagerRecyclerView.OnPageChangeListener() { @Override public void onPageChanged(int oldPosition, int position) { current_image_pos = position; getAlbum().setCurrentPhotoIndex(getAlbum().getCurrentMediaIndex()); toolbar.setTitle((position + 1) + " " + getString(R.string.of) + " " + size_all); invalidateOptionsMenu(); pathForDescription = favouriteslist.get(position).getPath(); } }); mViewPager.scrollToPosition(current_image_pos); } mViewPager.setAdapter(adapter); } }
Fixed #2180
app/src/main/java/org/fossasia/phimpme/gallery/activities/SingleMediaActivity.java
Fixed #2180
Java
apache-2.0
b51830691e4f0738e5d83e8bf4c5f400ab47d405
0
Sotera/graphene,Sotera/graphene,danieljue/graphene,Cognami/graphene,Cognami/graphene,codeaudit/graphene,Cognami/graphene,Sotera/graphene,Sotera/graphene,danieljue/graphene,danieljue/graphene,codeaudit/graphene,codeaudit/graphene,Cognami/graphene
package graphene.web.components.navigation; import graphene.dao.DataSourceListDAO; import graphene.model.idl.G_Constraint; import graphene.model.idl.G_SymbolConstants; import graphene.util.validator.ValidationUtils; import graphene.web.pages.CombinedEntitySearchPage; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.tapestry5.Link; import org.apache.tapestry5.PersistenceConstants; import org.apache.tapestry5.SelectModel; import org.apache.tapestry5.alerts.AlertManager; import org.apache.tapestry5.alerts.Duration; import org.apache.tapestry5.alerts.Severity; import org.apache.tapestry5.annotations.InjectPage; import org.apache.tapestry5.annotations.Persist; import org.apache.tapestry5.annotations.Property; import org.apache.tapestry5.annotations.SetupRender; import org.apache.tapestry5.ioc.annotations.Inject; import org.apache.tapestry5.ioc.annotations.Symbol; import org.apache.tapestry5.services.SelectModelFactory; import org.slf4j.Logger; public class GlobalSearch { @Property private SelectModel availableModels; @Inject SelectModelFactory smf; @Persist @Property private String selectedType; @Inject private DataSourceListDAO dao; @Property private List<String> availableTypes; @Persist @Property private List<Integer> maxResultsList; @Inject private AlertManager alertManager; @Persist(PersistenceConstants.CLIENT) @Property private String searchValue; @Persist @Property private Integer selectedMaxResults; @Inject @Symbol(G_SymbolConstants.DEFAULT_MAX_SEARCH_RESULTS) private Integer defaultMaxResults; @Inject private Logger logger; @InjectPage private CombinedEntitySearchPage searchPage; Object onSuccessFromGlobalSearchForm() { logger.debug("Searching with " + searchValue + " type: " + selectedType); Object retval = null; if (!ValidationUtils.isValid(selectedMaxResults)) { selectedMaxResults = defaultMaxResults; } if (ValidationUtils.isValid(searchValue)) { G_Constraint searchtype = G_Constraint.COMPARE_CONTAINS; if 
(searchValue.startsWith("\"") && searchValue.endsWith("\"")) { searchtype = G_Constraint.REQUIRED_EQUALS; } final Link link = searchPage.set(dao.getDefaultSchema(), selectedType, searchtype.name(), searchValue, selectedMaxResults); retval = link; } else { alertManager.alert(Duration.TRANSIENT, Severity.ERROR, "Please enter a valid search value."); } if (!ValidationUtils.isValid(retval)) { // alertManager.alert(Duration.TRANSIENT, Severity.WARN, // "There is no search broker configured for this instance of Graphene"); } return retval; } void onValidateFromGlobalSearchForm() { // The searchValue must be valid -- no script tags, etc. // The search type must be a valid type from the list // The search number must be a valid number from the list. } @SetupRender private void setupRender() { maxResultsList = new ArrayList<Integer>(3); maxResultsList.add(new Integer(200)); maxResultsList.add(new Integer(500)); maxResultsList.add(new Integer(1000)); if (!ValidationUtils.isValid(selectedMaxResults)) { selectedMaxResults = defaultMaxResults; } if (!ValidationUtils.isValid(availableTypes)) { availableTypes = dao.getAvailableTypes(); if (!ValidationUtils.isValid(availableTypes)) { logger.error("Could not get a list of types from the server."); } else { Collections.sort(availableTypes); } } } }
graphene-parent/graphene-web/src/main/java/graphene/web/components/navigation/GlobalSearch.java
package graphene.web.components.navigation; import graphene.dao.DataSourceListDAO; import graphene.model.idl.G_Constraint; import graphene.model.idl.G_SymbolConstants; import graphene.util.validator.ValidationUtils; import graphene.web.pages.CombinedEntitySearchPage; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.tapestry5.Link; import org.apache.tapestry5.PersistenceConstants; import org.apache.tapestry5.SelectModel; import org.apache.tapestry5.alerts.AlertManager; import org.apache.tapestry5.alerts.Duration; import org.apache.tapestry5.alerts.Severity; import org.apache.tapestry5.annotations.InjectPage; import org.apache.tapestry5.annotations.Persist; import org.apache.tapestry5.annotations.Property; import org.apache.tapestry5.annotations.SetupRender; import org.apache.tapestry5.ioc.annotations.Inject; import org.apache.tapestry5.ioc.annotations.Symbol; import org.apache.tapestry5.services.SelectModelFactory; import org.slf4j.Logger; public class GlobalSearch { @Property private SelectModel availableModels; @Inject SelectModelFactory smf; @Persist @Property private String selectedType; @Inject private DataSourceListDAO dao; @Property private List<String> availableTypes; @Persist @Property private List<Integer> maxResultsList; @Inject private AlertManager alertManager; @Persist(PersistenceConstants.CLIENT) @Property private String searchValue; @Persist @Property private Integer selectedMaxResults; @Inject @Symbol(G_SymbolConstants.DEFAULT_MAX_SEARCH_RESULTS) private Integer defaultMaxResults; @Inject private Logger logger; @InjectPage private CombinedEntitySearchPage searchPage; Object onSuccessFromGlobalSearchForm() { logger.debug("Searching with " + searchValue + " type: " + selectedType); Object retval = null; if (!ValidationUtils.isValid(selectedMaxResults)) { selectedMaxResults = defaultMaxResults; } if (ValidationUtils.isValid(searchValue)) { G_Constraint searchtype = G_Constraint.COMPARE_CONTAINS; if 
(searchValue.startsWith("\"") && searchValue.endsWith("\"")) { searchtype = G_Constraint.REQUIRED_EQUALS; } final Link link = searchPage.set(dao.getDefaultSchema(), selectedType, searchtype.name(), searchValue, selectedMaxResults); retval = link; } else { alertManager.alert(Duration.TRANSIENT, Severity.ERROR, "Please enter a valid search value."); } if (!ValidationUtils.isValid(retval)) { alertManager.alert(Duration.TRANSIENT, Severity.WARN, "There is no search broker configured for this instance of Graphene"); } return retval; } void onValidateFromGlobalSearchForm() { // The searchValue must be valid -- no script tags, etc. // The search type must be a valid type from the list // The search number must be a valid number from the list. } @SetupRender private void setupRender() { maxResultsList = new ArrayList<Integer>(3); maxResultsList.add(new Integer(200)); maxResultsList.add(new Integer(500)); maxResultsList.add(new Integer(1000)); if (!ValidationUtils.isValid(selectedMaxResults)) { selectedMaxResults = defaultMaxResults; } if (!ValidationUtils.isValid(availableTypes)) { availableTypes = dao.getAvailableTypes(); if (!ValidationUtils.isValid(availableTypes)) { logger.error("Could not get a list of types from the server."); } else { Collections.sort(availableTypes); } } } }
removed unneeded ui warning
graphene-parent/graphene-web/src/main/java/graphene/web/components/navigation/GlobalSearch.java
removed unneeded ui warning
Java
apache-2.0
484cbc40e414991a8d11dfc9d42891e5f3ccc103
0
robinverduijn/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,gradle/gradle,gstevey/gradle,lsmaira/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,lsmaira/gradle,blindpirate/gradle,gstevey/gradle,gstevey/gradle,robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,gradle/gradle,gstevey/gradle,lsmaira/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,gstevey/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,blindpirate/gradle,lsmaira/gradle,gstevey/gradle
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.execution.taskgraph; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.base.StandardSystemProperty; import com.google.common.collect.*; import org.gradle.api.*; import org.gradle.api.internal.GradleInternal; import org.gradle.api.internal.TaskInternal; import org.gradle.api.internal.project.ProjectInternal; import org.gradle.api.internal.tasks.CachingTaskDependencyResolveContext; import org.gradle.api.internal.tasks.TaskContainerInternal; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; import org.gradle.api.specs.Spec; import org.gradle.api.specs.Specs; import org.gradle.execution.MultipleBuildFailures; import org.gradle.execution.TaskFailureHandler; import org.gradle.initialization.BuildCancellationToken; import org.gradle.internal.Pair; import org.gradle.internal.UncheckedException; import org.gradle.internal.graph.CachingDirectedGraphWalker; import org.gradle.internal.graph.DirectedGraph; import org.gradle.internal.graph.DirectedGraphRenderer; import org.gradle.internal.graph.GraphNodeRenderer; import org.gradle.internal.logging.text.StyledTextOutput; import org.gradle.internal.resources.ResourceLock; import org.gradle.internal.resources.ResourceLockCoordinationService; import org.gradle.internal.work.WorkerLeaseRegistry.WorkerLease; import 
org.gradle.internal.work.WorkerLeaseService; import org.gradle.internal.resources.ResourceLockState; import org.gradle.util.CollectionUtils; import org.gradle.util.TextUtil; import java.io.File; import java.io.IOException; import java.io.StringWriter; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.gradle.internal.resources.DefaultResourceLockCoordinationService.unlock; import static org.gradle.internal.resources.ResourceLockState.Disposition.*; /** * A reusable implementation of TaskExecutionPlan. The {@link #addToTaskGraph(java.util.Collection)} and {@link #clear()} methods are NOT threadsafe, and callers must synchronize access to these * methods. */ public class DefaultTaskExecutionPlan implements TaskExecutionPlan { private final static Logger LOGGER = Logging.getLogger(DefaultTaskExecutionPlan.class); private final Set<TaskInfo> tasksInUnknownState = new LinkedHashSet<TaskInfo>(); private final Set<TaskInfo> entryTasks = new LinkedHashSet<TaskInfo>(); private final TaskDependencyGraph graph = new TaskDependencyGraph(); private final LinkedHashMap<Task, TaskInfo> executionPlan = new LinkedHashMap<Task, TaskInfo>(); private final List<TaskInfo> executionQueue = new LinkedList<TaskInfo>(); private final List<Throwable> failures = new ArrayList<Throwable>(); private Spec<? 
super Task> filter = Specs.satisfyAll(); private TaskFailureHandler failureHandler = new RethrowingFailureHandler(); private final BuildCancellationToken cancellationToken; private final Set<TaskInternal> runningTasks = Sets.newIdentityHashSet(); private final Map<Task, Set<String>> canonicalizedOutputCache = Maps.newIdentityHashMap(); private final Map<TaskInfo, ResourceLock> projectLocks = Maps.newHashMap(); private final ResourceLockCoordinationService coordinationService; private final WorkerLeaseService workerLeaseService; private boolean tasksCancelled; public DefaultTaskExecutionPlan(BuildCancellationToken cancellationToken, ResourceLockCoordinationService coordinationService, WorkerLeaseService workerLeaseService) { this.cancellationToken = cancellationToken; this.coordinationService = coordinationService; this.workerLeaseService = workerLeaseService; } public void addToTaskGraph(Collection<? extends Task> tasks) { List<TaskInfo> queue = new ArrayList<TaskInfo>(); List<Task> sortedTasks = new ArrayList<Task>(tasks); Collections.sort(sortedTasks); for (Task task : sortedTasks) { TaskInfo node = graph.addNode(task); if (node.isMustNotRun()) { requireWithDependencies(node); } else if (filter.isSatisfiedBy(task)) { node.require(); } entryTasks.add(node); queue.add(node); } Set<TaskInfo> visiting = new HashSet<TaskInfo>(); CachingTaskDependencyResolveContext context = new CachingTaskDependencyResolveContext(); while (!queue.isEmpty()) { TaskInfo node = queue.get(0); if (node.getDependenciesProcessed()) { // Have already visited this task - skip it queue.remove(0); continue; } TaskInternal task = node.getTask(); boolean filtered = !filter.isSatisfiedBy(task); if (filtered) { // Task is not required - skip it queue.remove(0); node.dependenciesProcessed(); node.doNotRequire(); continue; } if (visiting.add(node)) { // Have not seen this task before - add its dependencies to the head of the queue and leave this // task in the queue // Make sure it has been configured 
((TaskContainerInternal) task.getProject().getTasks()).prepareForExecution(task); Set<? extends Task> dependsOnTasks = context.getDependencies(task); for (Task dependsOnTask : dependsOnTasks) { TaskInfo targetNode = graph.addNode(dependsOnTask); node.addDependencySuccessor(targetNode); if (!visiting.contains(targetNode)) { queue.add(0, targetNode); } } for (Task finalizerTask : task.getFinalizedBy().getDependencies(task)) { TaskInfo targetNode = graph.addNode(finalizerTask); addFinalizerNode(node, targetNode); if (!visiting.contains(targetNode)) { queue.add(0, targetNode); } } for (Task mustRunAfter : task.getMustRunAfter().getDependencies(task)) { TaskInfo targetNode = graph.addNode(mustRunAfter); node.addMustSuccessor(targetNode); } for (Task shouldRunAfter : task.getShouldRunAfter().getDependencies(task)) { TaskInfo targetNode = graph.addNode(shouldRunAfter); node.addShouldSuccessor(targetNode); } if (node.isRequired()) { for (TaskInfo successor : node.getDependencySuccessors()) { if (filter.isSatisfiedBy(successor.getTask())) { successor.require(); } } } else { tasksInUnknownState.add(node); } } else { // Have visited this task's dependencies - add it to the graph queue.remove(0); visiting.remove(node); node.dependenciesProcessed(); } } resolveTasksInUnknownState(); } private void resolveTasksInUnknownState() { List<TaskInfo> queue = new ArrayList<TaskInfo>(tasksInUnknownState); Set<TaskInfo> visiting = new HashSet<TaskInfo>(); while (!queue.isEmpty()) { TaskInfo task = queue.get(0); if (task.isInKnownState()) { queue.remove(0); continue; } if (visiting.add(task)) { for (TaskInfo hardPredecessor : task.getDependencyPredecessors()) { if (!visiting.contains(hardPredecessor)) { queue.add(0, hardPredecessor); } } } else { queue.remove(0); visiting.remove(task); task.mustNotRun(); for (TaskInfo predecessor : task.getDependencyPredecessors()) { assert predecessor.isRequired() || predecessor.isMustNotRun(); if (predecessor.isRequired()) { task.require(); break; } } } 
} } private void addFinalizerNode(TaskInfo node, TaskInfo finalizerNode) { if (filter.isSatisfiedBy(finalizerNode.getTask())) { node.addFinalizer(finalizerNode); if (!finalizerNode.isInKnownState()) { finalizerNode.mustNotRun(); } finalizerNode.addMustSuccessor(node); } } private <T> void addAllReversed(List<T> list, TreeSet<T> set) { List<T> elements = CollectionUtils.toList(set); Collections.reverse(elements); list.addAll(elements); } private void requireWithDependencies(TaskInfo taskInfo) { if (taskInfo.isMustNotRun() && filter.isSatisfiedBy(taskInfo.getTask())) { taskInfo.require(); for (TaskInfo dependency : taskInfo.getDependencySuccessors()) { requireWithDependencies(dependency); } } } public void determineExecutionPlan() { List<TaskInfoInVisitingSegment> nodeQueue = Lists.newArrayList(Iterables.transform(entryTasks, new Function<TaskInfo, TaskInfoInVisitingSegment>() { int index; public TaskInfoInVisitingSegment apply(TaskInfo taskInfo) { return new TaskInfoInVisitingSegment(taskInfo, index++); } })); int visitingSegmentCounter = nodeQueue.size(); HashMultimap<TaskInfo, Integer> visitingNodes = HashMultimap.create(); Stack<GraphEdge> walkedShouldRunAfterEdges = new Stack<GraphEdge>(); Stack<TaskInfo> path = new Stack<TaskInfo>(); HashMap<TaskInfo, Integer> planBeforeVisiting = new HashMap<TaskInfo, Integer>(); while (!nodeQueue.isEmpty()) { TaskInfoInVisitingSegment taskInfoInVisitingSegment = nodeQueue.get(0); int currentSegment = taskInfoInVisitingSegment.visitingSegment; TaskInfo taskNode = taskInfoInVisitingSegment.taskInfo; if (taskNode.isIncludeInGraph() || executionPlan.containsKey(taskNode.getTask())) { nodeQueue.remove(0); visitingNodes.remove(taskNode, currentSegment); maybeRemoveProcessedShouldRunAfterEdge(walkedShouldRunAfterEdges, taskNode); continue; } boolean alreadyVisited = visitingNodes.containsKey(taskNode); visitingNodes.put(taskNode, currentSegment); if (!alreadyVisited) { // Have not seen this task before - add its dependencies to the 
head of the queue and leave this // task in the queue recordEdgeIfArrivedViaShouldRunAfter(walkedShouldRunAfterEdges, path, taskNode); removeShouldRunAfterSuccessorsIfTheyImposeACycle(visitingNodes, taskInfoInVisitingSegment); takePlanSnapshotIfCanBeRestoredToCurrentTask(planBeforeVisiting, taskNode); ArrayList<TaskInfo> successors = new ArrayList<TaskInfo>(); addAllSuccessorsInReverseOrder(taskNode, successors); for (TaskInfo successor : successors) { if (visitingNodes.containsEntry(successor, currentSegment)) { if (!walkedShouldRunAfterEdges.empty()) { //remove the last walked should run after edge and restore state from before walking it GraphEdge toBeRemoved = walkedShouldRunAfterEdges.pop(); toBeRemoved.from.removeShouldRunAfterSuccessor(toBeRemoved.to); restorePath(path, toBeRemoved); restoreQueue(nodeQueue, visitingNodes, toBeRemoved); restoreExecutionPlan(planBeforeVisiting, toBeRemoved); break; } else { onOrderingCycle(); } } nodeQueue.add(0, new TaskInfoInVisitingSegment(successor, currentSegment)); } path.push(taskNode); } else { // Have visited this task's dependencies - add it to the end of the plan nodeQueue.remove(0); maybeRemoveProcessedShouldRunAfterEdge(walkedShouldRunAfterEdges, taskNode); visitingNodes.remove(taskNode, currentSegment); path.pop(); executionPlan.put(taskNode.getTask(), taskNode); // Add any finalizers to the queue ArrayList<TaskInfo> finalizerTasks = new ArrayList<TaskInfo>(); addAllReversed(finalizerTasks, taskNode.getFinalizers()); for (TaskInfo finalizer : finalizerTasks) { if (!visitingNodes.containsKey(finalizer)) { nodeQueue.add(finalizerTaskPosition(finalizer, nodeQueue), new TaskInfoInVisitingSegment(finalizer, visitingSegmentCounter++)); } } } } executionQueue.clear(); executionQueue.addAll(executionPlan.values()); } private void maybeRemoveProcessedShouldRunAfterEdge(Stack<GraphEdge> walkedShouldRunAfterEdges, TaskInfo taskNode) { if (!walkedShouldRunAfterEdges.isEmpty() && 
walkedShouldRunAfterEdges.peek().to.equals(taskNode)) { walkedShouldRunAfterEdges.pop(); } } private void restoreExecutionPlan(HashMap<TaskInfo, Integer> planBeforeVisiting, GraphEdge toBeRemoved) { Iterator<Map.Entry<Task, TaskInfo>> executionPlanIterator = executionPlan.entrySet().iterator(); for (int i = 0; i < planBeforeVisiting.get(toBeRemoved.from); i++) { executionPlanIterator.next(); } while (executionPlanIterator.hasNext()) { executionPlanIterator.next(); executionPlanIterator.remove(); } } private void restoreQueue(List<TaskInfoInVisitingSegment> nodeQueue, HashMultimap<TaskInfo, Integer> visitingNodes, GraphEdge toBeRemoved) { TaskInfoInVisitingSegment nextInQueue = null; while (nextInQueue == null || !toBeRemoved.from.equals(nextInQueue.taskInfo)) { nextInQueue = nodeQueue.get(0); visitingNodes.remove(nextInQueue.taskInfo, nextInQueue.visitingSegment); if (!toBeRemoved.from.equals(nextInQueue.taskInfo)) { nodeQueue.remove(0); } } } private void restorePath(Stack<TaskInfo> path, GraphEdge toBeRemoved) { TaskInfo removedFromPath = null; while (!toBeRemoved.from.equals(removedFromPath)) { removedFromPath = path.pop(); } } private void addAllSuccessorsInReverseOrder(TaskInfo taskNode, ArrayList<TaskInfo> dependsOnTasks) { addAllReversed(dependsOnTasks, taskNode.getDependencySuccessors()); addAllReversed(dependsOnTasks, taskNode.getMustSuccessors()); addAllReversed(dependsOnTasks, taskNode.getShouldSuccessors()); } private void removeShouldRunAfterSuccessorsIfTheyImposeACycle(final HashMultimap<TaskInfo, Integer> visitingNodes, final TaskInfoInVisitingSegment taskNodeWithVisitingSegment) { TaskInfo taskNode = taskNodeWithVisitingSegment.taskInfo; Iterables.removeIf(taskNode.getShouldSuccessors(), new Predicate<TaskInfo>() { public boolean apply(TaskInfo input) { return visitingNodes.containsEntry(input, taskNodeWithVisitingSegment.visitingSegment); } }); } private void takePlanSnapshotIfCanBeRestoredToCurrentTask(HashMap<TaskInfo, Integer> 
planBeforeVisiting, TaskInfo taskNode) { if (taskNode.getShouldSuccessors().size() > 0) { planBeforeVisiting.put(taskNode, executionPlan.size()); } } private void recordEdgeIfArrivedViaShouldRunAfter(Stack<GraphEdge> walkedShouldRunAfterEdges, Stack<TaskInfo> path, TaskInfo taskNode) { if (!path.empty() && path.peek().getShouldSuccessors().contains(taskNode)) { walkedShouldRunAfterEdges.push(new GraphEdge(path.peek(), taskNode)); } } /** * Given a finalizer task, determine where in the current node queue that it should be inserted. * The finalizer should be inserted after any of it's preceding tasks. */ private int finalizerTaskPosition(TaskInfo finalizer, final List<TaskInfoInVisitingSegment> nodeQueue) { if (nodeQueue.size() == 0) { return 0; } Set<TaskInfo> precedingTasks = getAllPrecedingTasks(finalizer); Set<Integer> precedingTaskIndices = CollectionUtils.collect(precedingTasks, new Transformer<Integer, TaskInfo>() { public Integer transform(final TaskInfo dependsOnTask) { return Iterables.indexOf(nodeQueue, new Predicate<TaskInfoInVisitingSegment>() { public boolean apply(TaskInfoInVisitingSegment taskInfoInVisitingSegment) { return taskInfoInVisitingSegment.taskInfo.equals(dependsOnTask); } }); } }); return Collections.max(precedingTaskIndices) + 1; } private Set<TaskInfo> getAllPrecedingTasks(TaskInfo finalizer) { Set<TaskInfo> precedingTasks = new HashSet<TaskInfo>(); Deque<TaskInfo> candidateTasks = new ArrayDeque<TaskInfo>(); // Consider every task that must run before the finalizer candidateTasks.addAll(finalizer.getDependencySuccessors()); candidateTasks.addAll(finalizer.getMustSuccessors()); candidateTasks.addAll(finalizer.getShouldSuccessors()); // For each candidate task, add it to the preceding tasks. while (!candidateTasks.isEmpty()) { TaskInfo precedingTask = candidateTasks.pop(); if (precedingTasks.add(precedingTask)) { // Any task that the preceding task must run after is also a preceding task. 
candidateTasks.addAll(precedingTask.getMustSuccessors()); } } return precedingTasks; } private void onOrderingCycle() { CachingDirectedGraphWalker<TaskInfo, Void> graphWalker = new CachingDirectedGraphWalker<TaskInfo, Void>(new DirectedGraph<TaskInfo, Void>() { public void getNodeValues(TaskInfo node, Collection<? super Void> values, Collection<? super TaskInfo> connectedNodes) { connectedNodes.addAll(node.getDependencySuccessors()); connectedNodes.addAll(node.getMustSuccessors()); } }); graphWalker.add(entryTasks); final List<TaskInfo> firstCycle = new ArrayList<TaskInfo>(graphWalker.findCycles().get(0)); Collections.sort(firstCycle); DirectedGraphRenderer<TaskInfo> graphRenderer = new DirectedGraphRenderer<TaskInfo>(new GraphNodeRenderer<TaskInfo>() { public void renderTo(TaskInfo node, StyledTextOutput output) { output.withStyle(StyledTextOutput.Style.Identifier).text(node.getTask().getPath()); } }, new DirectedGraph<TaskInfo, Object>() { public void getNodeValues(TaskInfo node, Collection<? super Object> values, Collection<? 
super TaskInfo> connectedNodes) { for (TaskInfo dependency : firstCycle) { if (node.getDependencySuccessors().contains(dependency) || node.getMustSuccessors().contains(dependency)) { connectedNodes.add(dependency); } } } }); StringWriter writer = new StringWriter(); graphRenderer.renderTo(firstCycle.get(0), writer); throw new CircularReferenceException(String.format("Circular dependency between the following tasks:%n%s", writer.toString())); } public void clear() { coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() { @Override public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) { graph.clear(); entryTasks.clear(); executionPlan.clear(); executionQueue.clear(); failures.clear(); canonicalizedOutputCache.clear(); runningTasks.clear(); return FINISHED; } }); } public List<Task> getTasks() { return new ArrayList<Task>(executionPlan.keySet()); } public void useFilter(Spec<? super Task> filter) { this.filter = filter; } public void useFailureHandler(TaskFailureHandler handler) { this.failureHandler = handler; } @Override public boolean executeWithTask(WorkerLease parentWorkerLease, final Action<TaskInfo> taskExecution) { final AtomicReference<TaskInfo> selected = new AtomicReference<TaskInfo>(); final AtomicBoolean canExecute = new AtomicBoolean(); final ResourceLock workerLease = parentWorkerLease.createChild(); coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() { @Override public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) { if (cancellationToken.isCancellationRequested()) { if (abortExecution()) { tasksCancelled = true; } } final Iterator<TaskInfo> iterator = executionQueue.iterator(); while (iterator.hasNext()) { final TaskInfo taskInfo = iterator.next(); if (taskInfo.isReady()) { coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() { @Override public 
ResourceLockState.Disposition transform(ResourceLockState resourceLockState) { ResourceLock projectLock = getProjectLock(taskInfo); // TODO: convert output file checks to a resource lock if (projectLock.tryLock() && workerLease.tryLock() && taskInfo.allDependenciesComplete() && canRunWithWithCurrentlyExecutedTasks(taskInfo)) { selected.set(taskInfo); iterator.remove(); if (taskInfo.allDependenciesSuccessful()) { taskInfo.startExecution(); recordTaskStarted(taskInfo); canExecute.set(true); } else { taskInfo.skipExecution(); } return FINISHED; } else { return FAILED; } } }); if (selected.get() != null) { break; } } } if (selected.get() == null && !allTasksComplete()) { return RETRY; } else { return FINISHED; } } }); if (selected.get() != null) { try { if (canExecute.get()) { taskExecution.execute(selected.get()); } } finally { TaskInfo taskInfo = selected.get(); ResourceLock projectLock = getProjectLock(taskInfo); coordinationService.withStateLock(unlock(projectLock, workerLease)); } } // If all tasks are complete, we're done return !allTasksComplete(); } private ResourceLock getProjectLock(TaskInfo taskInfo) { if (projectLocks.containsKey(taskInfo)) { return projectLocks.get(taskInfo); } Project project = taskInfo.getTask().getProject(); String gradlePath = ((GradleInternal) project.getGradle()).getIdentityPath().toString(); String projectPath = ((ProjectInternal) project).getIdentityPath().toString(); ResourceLock projectLock = workerLeaseService.getProjectLock(gradlePath, projectPath); projectLocks.put(taskInfo, projectLock); return projectLock; } private boolean canRunWithWithCurrentlyExecutedTasks(TaskInfo taskInfo) { TaskInternal task = taskInfo.getTask(); Pair<TaskInternal, String> overlap = firstTaskWithOverlappingOutput(task); if (overlap == null) { return true; } else { LOGGER.info("Cannot execute task {} in parallel with task {} due to overlapping output: {}", task.getPath(), overlap.left.getPath(), overlap.right); } return false; } private Set<String> 
canonicalizedOutputPaths(TaskInternal task) { Set<String> paths = canonicalizedOutputCache.get(task); if (paths == null) { paths = Sets.newHashSet(Iterables.transform(task.getOutputs().getFiles(), new Function<File, String>() { @Override public String apply(File file) { String path; try { path = file.getCanonicalPath(); } catch (IOException e) { throw new UncheckedIOException(e); } return path; } })); canonicalizedOutputCache.put(task, paths); } return paths; } @Nullable private Pair<TaskInternal, String> firstTaskWithOverlappingOutput(TaskInternal candidateTask) { if (runningTasks.isEmpty()) { return null; } for (String candidateTaskOutputPath : canonicalizedOutputPaths(candidateTask)) { for (TaskInternal runningTask : runningTasks) { for (String runningTaskOutputPath : canonicalizedOutputPaths(runningTask)) { if (pathsOverlap(candidateTaskOutputPath, runningTaskOutputPath)) { return Pair.of(runningTask, TextUtil.shorterOf(candidateTaskOutputPath, runningTaskOutputPath)); } } } } return null; } private boolean pathsOverlap(String firstPath, String secondPath) { if (firstPath.equals(secondPath)) { return true; } String shorter; String longer; if (firstPath.length() > secondPath.length()) { shorter = secondPath; longer = firstPath; } else { shorter = firstPath; longer = secondPath; } return longer.startsWith(shorter + StandardSystemProperty.FILE_SEPARATOR.value()); } private void recordTaskStarted(TaskInfo taskInfo) { TaskInternal task = taskInfo.getTask(); runningTasks.add(task); } private void recordTaskCompleted(TaskInfo taskInfo) { TaskInternal task = taskInfo.getTask(); canonicalizedOutputCache.remove(task); runningTasks.remove(task); } public void taskComplete(final TaskInfo taskInfo) { coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() { @Override public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) { enforceFinalizerTasks(taskInfo); if (taskInfo.isFailed()) { 
handleFailure(taskInfo); } taskInfo.finishExecution(); recordTaskCompleted(taskInfo); return FINISHED; } }); } private void enforceFinalizerTasks(TaskInfo taskInfo) { for (TaskInfo finalizerNode : taskInfo.getFinalizers()) { if (finalizerNode.isRequired() || finalizerNode.isMustNotRun()) { enforceWithDependencies(finalizerNode, Sets.<TaskInfo>newHashSet()); } } } private void enforceWithDependencies(TaskInfo nodeInfo, Set<TaskInfo> enforcedTasks) { Deque<TaskInfo> candidateNodes = new ArrayDeque<TaskInfo>(); candidateNodes.add(nodeInfo); while (!candidateNodes.isEmpty()) { TaskInfo node = candidateNodes.pop(); if (!enforcedTasks.contains(node)) { enforcedTasks.add(node); candidateNodes.addAll(node.getDependencySuccessors()); if (node.isMustNotRun() || node.isRequired()) { node.enforceRun(); } } } } private void handleFailure(TaskInfo taskInfo) { Throwable executionFailure = taskInfo.getExecutionFailure(); if (executionFailure != null) { // Always abort execution for an execution failure (as opposed to a task failure) abortExecution(); this.failures.add(executionFailure); return; } // Task failure try { failureHandler.onTaskFailure(taskInfo.getTask()); this.failures.add(taskInfo.getTaskFailure()); } catch (Exception e) { // If the failure handler rethrows exception, then execution of other tasks is aborted. (--continue will collect failures) abortExecution(); this.failures.add(e); } } private boolean abortExecution() { // Allow currently executing and enforced tasks to complete, but skip everything else. 
boolean aborted = false; for (TaskInfo taskInfo : executionPlan.values()) { if (taskInfo.isRequired()) { taskInfo.skipExecution(); aborted = true; } } return aborted; } public void awaitCompletion() { coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() { @Override public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) { if (allTasksComplete()) { rethrowFailures(); return FINISHED; } else { return RETRY; } } }); } private void rethrowFailures() { if (tasksCancelled) { failures.add(new BuildCancelledException()); } if (failures.isEmpty()) { return; } if (failures.size() > 1) { throw new MultipleBuildFailures(failures); } throw UncheckedException.throwAsUncheckedException(failures.get(0)); } private boolean allTasksComplete() { for (TaskInfo taskInfo : executionPlan.values()) { if (!taskInfo.isComplete()) { return false; } } return true; } private static class GraphEdge { private final TaskInfo from; private final TaskInfo to; private GraphEdge(TaskInfo from, TaskInfo to) { this.from = from; this.to = to; } } private static class TaskInfoInVisitingSegment { private final TaskInfo taskInfo; private final int visitingSegment; private TaskInfoInVisitingSegment(TaskInfo taskInfo, int visitingSegment) { this.taskInfo = taskInfo; this.visitingSegment = visitingSegment; } } private static class RethrowingFailureHandler implements TaskFailureHandler { public void onTaskFailure(Task task) { task.getState().rethrowFailure(); } } }
subprojects/core/src/main/java/org/gradle/execution/taskgraph/DefaultTaskExecutionPlan.java
/*
 * Copyright 2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.execution.taskgraph;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.StandardSystemProperty;
import com.google.common.collect.*;
import org.gradle.api.*;
import org.gradle.api.internal.GradleInternal;
import org.gradle.api.internal.TaskInternal;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.api.internal.tasks.CachingTaskDependencyResolveContext;
import org.gradle.api.internal.tasks.TaskContainerInternal;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.specs.Spec;
import org.gradle.api.specs.Specs;
import org.gradle.execution.MultipleBuildFailures;
import org.gradle.execution.TaskFailureHandler;
import org.gradle.initialization.BuildCancellationToken;
import org.gradle.internal.Pair;
import org.gradle.internal.UncheckedException;
import org.gradle.internal.graph.CachingDirectedGraphWalker;
import org.gradle.internal.graph.DirectedGraph;
import org.gradle.internal.graph.DirectedGraphRenderer;
import org.gradle.internal.graph.GraphNodeRenderer;
import org.gradle.internal.logging.text.StyledTextOutput;
import org.gradle.internal.resources.ResourceLock;
import org.gradle.internal.resources.ResourceLockCoordinationService;
import org.gradle.internal.work.WorkerLeaseRegistry.WorkerLease;
import org.gradle.internal.work.WorkerLeaseService;
import org.gradle.internal.resources.ResourceLockState;
import org.gradle.util.CollectionUtils;
import org.gradle.util.TextUtil;

import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static org.gradle.internal.resources.DefaultResourceLockCoordinationService.unlock;
import static org.gradle.internal.resources.ResourceLockState.Disposition.*;

/**
 * A reusable implementation of TaskExecutionPlan. The {@link #addToTaskGraph(java.util.Collection)} and {@link #clear()} methods are NOT threadsafe, and callers must synchronize access to these
 * methods.
 */
public class DefaultTaskExecutionPlan implements TaskExecutionPlan {
    private final static Logger LOGGER = Logging.getLogger(DefaultTaskExecutionPlan.class);

    // Nodes whose required/must-not-run state could not be decided while walking dependencies;
    // they are settled afterwards in resolveTasksInUnknownState().
    private final Set<TaskInfo> tasksInUnknownState = new LinkedHashSet<TaskInfo>();
    // The explicitly requested tasks — the entry points of the graph walk.
    private final Set<TaskInfo> entryTasks = new LinkedHashSet<TaskInfo>();
    private final TaskDependencyGraph graph = new TaskDependencyGraph();
    // Insertion order of this map IS the planned execution order (LinkedHashMap).
    private final LinkedHashMap<Task, TaskInfo> executionPlan = new LinkedHashMap<Task, TaskInfo>();
    // Remaining work, initialized from executionPlan at the end of determineExecutionPlan().
    private final List<TaskInfo> executionQueue = new LinkedList<TaskInfo>();
    private final List<Throwable> failures = new ArrayList<Throwable>();
    private Spec<? super Task> filter = Specs.satisfyAll();

    private TaskFailureHandler failureHandler = new RethrowingFailureHandler();
    private final BuildCancellationToken cancellationToken;
    // Tasks currently executing; identity-based sets/maps are used deliberately here.
    private final Set<TaskInternal> runningTasks = Sets.newIdentityHashSet();
    // Cache of canonicalized output file paths per task, used for output-overlap detection.
    private final Map<Task, Set<String>> canonicalizedOutputCache = Maps.newIdentityHashMap();
    // Lazily-resolved project lock per task node, see getProjectLock().
    private final Map<TaskInfo, ResourceLock> projectLocks = Maps.newHashMap();
    private final ResourceLockCoordinationService coordinationService;
    private final WorkerLeaseService workerLeaseService;
    // Set when cancellation caused tasks to be aborted; rethrowFailures() turns this into a BuildCancelledException.
    private boolean tasksCancelled;

    public DefaultTaskExecutionPlan(BuildCancellationToken cancellationToken, ResourceLockCoordinationService coordinationService, WorkerLeaseService workerLeaseService) {
        this.cancellationToken = cancellationToken;
        this.coordinationService = coordinationService;
        this.workerLeaseService = workerLeaseService;
    }

    // Adds the given tasks (and, transitively, their dependencies, finalizers and ordering
    // constraints) to the dependency graph via an iterative depth-first walk. NOT threadsafe.
    public void addToTaskGraph(Collection<? extends Task> tasks) {
        List<TaskInfo> queue = new ArrayList<TaskInfo>();

        List<Task> sortedTasks = new ArrayList<Task>(tasks);
        Collections.sort(sortedTasks);
        for (Task task : sortedTasks) {
            TaskInfo node = graph.addNode(task);
            if (node.isMustNotRun()) {
                requireWithDependencies(node);
            } else if (filter.isSatisfiedBy(task)) {
                node.require();
            }
            entryTasks.add(node);
            queue.add(node);
        }

        Set<TaskInfo> visiting = new HashSet<TaskInfo>();
        CachingTaskDependencyResolveContext context = new CachingTaskDependencyResolveContext();

        while (!queue.isEmpty()) {
            TaskInfo node = queue.get(0);
            if (node.getDependenciesProcessed()) {
                // Have already visited this task - skip it
                queue.remove(0);
                continue;
            }

            TaskInternal task = node.getTask();
            boolean filtered = !filter.isSatisfiedBy(task);
            if (filtered) {
                // Task is not required - skip it
                queue.remove(0);
                node.dependenciesProcessed();
                node.doNotRequire();
                continue;
            }

            if (visiting.add(node)) {
                // Have not seen this task before - add its dependencies to the head of the queue and leave this
                // task in the queue
                // Make sure it has been configured
                ((TaskContainerInternal) task.getProject().getTasks()).prepareForExecution(task);
                Set<? extends Task> dependsOnTasks = context.getDependencies(task);
                for (Task dependsOnTask : dependsOnTasks) {
                    TaskInfo targetNode = graph.addNode(dependsOnTask);
                    node.addDependencySuccessor(targetNode);
                    if (!visiting.contains(targetNode)) {
                        queue.add(0, targetNode);
                    }
                }
                for (Task finalizerTask : task.getFinalizedBy().getDependencies(task)) {
                    TaskInfo targetNode = graph.addNode(finalizerTask);
                    addFinalizerNode(node, targetNode);
                    if (!visiting.contains(targetNode)) {
                        queue.add(0, targetNode);
                    }
                }
                // mustRunAfter/shouldRunAfter only impose ordering — they do not pull the target
                // task into the build, so the targets are not queued for visiting here.
                for (Task mustRunAfter : task.getMustRunAfter().getDependencies(task)) {
                    TaskInfo targetNode = graph.addNode(mustRunAfter);
                    node.addMustSuccessor(targetNode);
                }
                for (Task shouldRunAfter : task.getShouldRunAfter().getDependencies(task)) {
                    TaskInfo targetNode = graph.addNode(shouldRunAfter);
                    node.addShouldSuccessor(targetNode);
                }
                if (node.isRequired()) {
                    for (TaskInfo successor : node.getDependencySuccessors()) {
                        if (filter.isSatisfiedBy(successor.getTask())) {
                            successor.require();
                        }
                    }
                } else {
                    tasksInUnknownState.add(node);
                }
            } else {
                // Have visited this task's dependencies - add it to the graph
                queue.remove(0);
                visiting.remove(node);
                node.dependenciesProcessed();
            }
        }
        resolveTasksInUnknownState();
    }

    // Settles nodes left in unknown state by addToTaskGraph(): a node becomes required if any
    // of its dependency predecessors is required, otherwise it must not run.
    private void resolveTasksInUnknownState() {
        List<TaskInfo> queue = new ArrayList<TaskInfo>(tasksInUnknownState);
        Set<TaskInfo> visiting = new HashSet<TaskInfo>();

        while (!queue.isEmpty()) {
            TaskInfo task = queue.get(0);
            if (task.isInKnownState()) {
                queue.remove(0);
                continue;
            }

            if (visiting.add(task)) {
                // Resolve the predecessors first, then come back to this node.
                for (TaskInfo hardPredecessor : task.getDependencyPredecessors()) {
                    if (!visiting.contains(hardPredecessor)) {
                        queue.add(0, hardPredecessor);
                    }
                }
            } else {
                queue.remove(0);
                visiting.remove(task);
                task.mustNotRun();
                for (TaskInfo predecessor : task.getDependencyPredecessors()) {
                    assert predecessor.isRequired() || predecessor.isMustNotRun();
                    if (predecessor.isRequired()) {
                        task.require();
                        break;
                    }
                }
            }
        }
    }

    // Registers finalizerNode as a finalizer of node; the finalizer must run after the task it
    // finalizes, hence the must-successor edge back to node.
    private void addFinalizerNode(TaskInfo node, TaskInfo finalizerNode) {
        if (filter.isSatisfiedBy(finalizerNode.getTask())) {
            node.addFinalizer(finalizerNode);
            if (!finalizerNode.isInKnownState()) {
                finalizerNode.mustNotRun();
            }
            finalizerNode.addMustSuccessor(node);
        }
    }

    // Appends the elements of the sorted set to the list in reverse order (used to push
    // successors onto a head-insertion queue so they are visited in natural order).
    private <T> void addAllReversed(List<T> list, TreeSet<T> set) {
        List<T> elements = CollectionUtils.toList(set);
        Collections.reverse(elements);
        list.addAll(elements);
    }

    // Flips a must-not-run node (and transitively its dependencies) to required, honoring the filter.
    private void requireWithDependencies(TaskInfo taskInfo) {
        if (taskInfo.isMustNotRun() && filter.isSatisfiedBy(taskInfo.getTask())) {
            taskInfo.require();
            for (TaskInfo dependency : taskInfo.getDependencySuccessors()) {
                requireWithDependencies(dependency);
            }
        }
    }

    // Linearizes the graph into executionPlan/executionQueue via DFS. shouldRunAfter edges are
    // soft: if honoring one creates a cycle, the walk backtracks (restorePath/restoreQueue/
    // restoreExecutionPlan) and drops that edge instead of failing. Only hard cycles (depends-on
    // or mustRunAfter) reach onOrderingCycle().
    public void determineExecutionPlan() {
        List<TaskInfoInVisitingSegment> nodeQueue = Lists.newArrayList(Iterables.transform(entryTasks, new Function<TaskInfo, TaskInfoInVisitingSegment>() {
            int index;

            public TaskInfoInVisitingSegment apply(TaskInfo taskInfo) {
                return new TaskInfoInVisitingSegment(taskInfo, index++);
            }
        }));
        int visitingSegmentCounter = nodeQueue.size();

        HashMultimap<TaskInfo, Integer> visitingNodes = HashMultimap.create();
        Stack<GraphEdge> walkedShouldRunAfterEdges = new Stack<GraphEdge>();
        Stack<TaskInfo> path = new Stack<TaskInfo>();
        HashMap<TaskInfo, Integer> planBeforeVisiting = new HashMap<TaskInfo, Integer>();

        while (!nodeQueue.isEmpty()) {
            TaskInfoInVisitingSegment taskInfoInVisitingSegment = nodeQueue.get(0);
            int currentSegment = taskInfoInVisitingSegment.visitingSegment;
            TaskInfo taskNode = taskInfoInVisitingSegment.taskInfo;

            if (taskNode.isIncludeInGraph() || executionPlan.containsKey(taskNode.getTask())) {
                nodeQueue.remove(0);
                visitingNodes.remove(taskNode, currentSegment);
                maybeRemoveProcessedShouldRunAfterEdge(walkedShouldRunAfterEdges, taskNode);
                continue;
            }

            boolean alreadyVisited = visitingNodes.containsKey(taskNode);
            visitingNodes.put(taskNode, currentSegment);

            if (!alreadyVisited) {
                // Have not seen this task before - add its dependencies to the head of the queue and leave this
                // task in the queue
                recordEdgeIfArrivedViaShouldRunAfter(walkedShouldRunAfterEdges, path, taskNode);
                removeShouldRunAfterSuccessorsIfTheyImposeACycle(visitingNodes, taskInfoInVisitingSegment);
                takePlanSnapshotIfCanBeRestoredToCurrentTask(planBeforeVisiting, taskNode);

                ArrayList<TaskInfo> successors = new ArrayList<TaskInfo>();
                addAllSuccessorsInReverseOrder(taskNode, successors);
                for (TaskInfo successor : successors) {
                    if (visitingNodes.containsEntry(successor, currentSegment)) {
                        // Cycle detected within the current visiting segment.
                        if (!walkedShouldRunAfterEdges.empty()) {
                            //remove the last walked should run after edge and restore state from before walking it
                            GraphEdge toBeRemoved = walkedShouldRunAfterEdges.pop();
                            toBeRemoved.from.removeShouldRunAfterSuccessor(toBeRemoved.to);
                            restorePath(path, toBeRemoved);
                            restoreQueue(nodeQueue, visitingNodes, toBeRemoved);
                            restoreExecutionPlan(planBeforeVisiting, toBeRemoved);
                            break;
                        } else {
                            onOrderingCycle();
                        }
                    }
                    nodeQueue.add(0, new TaskInfoInVisitingSegment(successor, currentSegment));
                }
                path.push(taskNode);
            } else {
                // Have visited this task's dependencies - add it to the end of the plan
                nodeQueue.remove(0);
                maybeRemoveProcessedShouldRunAfterEdge(walkedShouldRunAfterEdges, taskNode);
                visitingNodes.remove(taskNode, currentSegment);
                path.pop();
                executionPlan.put(taskNode.getTask(), taskNode);

                // Add any finalizers to the queue
                ArrayList<TaskInfo> finalizerTasks = new ArrayList<TaskInfo>();
                addAllReversed(finalizerTasks, taskNode.getFinalizers());
                for (TaskInfo finalizer : finalizerTasks) {
                    if (!visitingNodes.containsKey(finalizer)) {
                        // Each finalizer gets its own fresh visiting segment.
                        nodeQueue.add(finalizerTaskPosition(finalizer, nodeQueue), new TaskInfoInVisitingSegment(finalizer, visitingSegmentCounter++));
                    }
                }
            }
        }
        executionQueue.clear();
        executionQueue.addAll(executionPlan.values());
    }

    // Pops the top walked shouldRunAfter edge if it terminates at the node just processed.
    private void maybeRemoveProcessedShouldRunAfterEdge(Stack<GraphEdge> walkedShouldRunAfterEdges, TaskInfo taskNode) {
        if (!walkedShouldRunAfterEdges.isEmpty() && walkedShouldRunAfterEdges.peek().to.equals(taskNode)) {
            walkedShouldRunAfterEdges.pop();
        }
    }

    // Truncates executionPlan back to the size snapshotted before the removed edge's source
    // node was visited (relies on LinkedHashMap iteration order).
    private void restoreExecutionPlan(HashMap<TaskInfo, Integer> planBeforeVisiting, GraphEdge toBeRemoved) {
        Iterator<Map.Entry<Task, TaskInfo>> executionPlanIterator = executionPlan.entrySet().iterator();
        for (int i = 0; i < planBeforeVisiting.get(toBeRemoved.from); i++) {
            executionPlanIterator.next();
        }
        while (executionPlanIterator.hasNext()) {
            executionPlanIterator.next();
            executionPlanIterator.remove();
        }
    }

    // Pops queue entries (and their visitingNodes marks) until the source node of the removed
    // edge is back at the head of the queue.
    private void restoreQueue(List<TaskInfoInVisitingSegment> nodeQueue, HashMultimap<TaskInfo, Integer> visitingNodes, GraphEdge toBeRemoved) {
        TaskInfoInVisitingSegment nextInQueue = null;
        while (nextInQueue == null || !toBeRemoved.from.equals(nextInQueue.taskInfo)) {
            nextInQueue = nodeQueue.get(0);
            visitingNodes.remove(nextInQueue.taskInfo, nextInQueue.visitingSegment);
            if (!toBeRemoved.from.equals(nextInQueue.taskInfo)) {
                nodeQueue.remove(0);
            }
        }
    }

    // Pops the DFS path back to (and including) the source node of the removed edge.
    private void restorePath(Stack<TaskInfo> path, GraphEdge toBeRemoved) {
        TaskInfo removedFromPath = null;
        while (!toBeRemoved.from.equals(removedFromPath)) {
            removedFromPath = path.pop();
        }
    }

    // Collects dependency, mustRunAfter and shouldRunAfter successors, each group reversed so
    // head-insertion into the node queue preserves their natural order.
    private void addAllSuccessorsInReverseOrder(TaskInfo taskNode, ArrayList<TaskInfo> dependsOnTasks) {
        addAllReversed(dependsOnTasks, taskNode.getDependencySuccessors());
        addAllReversed(dependsOnTasks, taskNode.getMustSuccessors());
        addAllReversed(dependsOnTasks, taskNode.getShouldSuccessors());
    }

    // Drops shouldRunAfter edges that point back into the segment currently being visited,
    // since keeping them would create a cycle.
    private void removeShouldRunAfterSuccessorsIfTheyImposeACycle(final HashMultimap<TaskInfo, Integer> visitingNodes, final TaskInfoInVisitingSegment taskNodeWithVisitingSegment) {
        TaskInfo taskNode = taskNodeWithVisitingSegment.taskInfo;
        Iterables.removeIf(taskNode.getShouldSuccessors(), new Predicate<TaskInfo>() {
            public boolean apply(TaskInfo input) {
                return visitingNodes.containsEntry(input, taskNodeWithVisitingSegment.visitingSegment);
            }
        });
    }

    // Snapshots the current plan size for nodes that have shouldRunAfter successors, so the
    // plan can be rolled back to this point if one of those edges must later be abandoned.
    private void takePlanSnapshotIfCanBeRestoredToCurrentTask(HashMap<TaskInfo, Integer> planBeforeVisiting, TaskInfo taskNode) {
        if (taskNode.getShouldSuccessors().size() > 0) {
            planBeforeVisiting.put(taskNode, executionPlan.size());
        }
    }

    // Remembers that we reached taskNode via a shouldRunAfter edge from the node on top of the
    // path; such edges are the candidates for removal when a cycle is hit.
    private void recordEdgeIfArrivedViaShouldRunAfter(Stack<GraphEdge> walkedShouldRunAfterEdges, Stack<TaskInfo> path, TaskInfo taskNode) {
        if (!path.empty() && path.peek().getShouldSuccessors().contains(taskNode)) {
            walkedShouldRunAfterEdges.push(new GraphEdge(path.peek(), taskNode));
        }
    }

    /**
     * Given a finalizer task, determine where in the current node queue that it should be inserted.
     * The finalizer should be inserted after any of it's preceding tasks.
     */
    private int finalizerTaskPosition(TaskInfo finalizer, final List<TaskInfoInVisitingSegment> nodeQueue) {
        if (nodeQueue.size() == 0) {
            return 0;
        }

        Set<TaskInfo> precedingTasks = getAllPrecedingTasks(finalizer);
        Set<Integer> precedingTaskIndices = CollectionUtils.collect(precedingTasks, new Transformer<Integer, TaskInfo>() {
            public Integer transform(final TaskInfo dependsOnTask) {
                // Iterables.indexOf returns -1 when the task is not queued, which is harmless
                // under the max() below.
                return Iterables.indexOf(nodeQueue, new Predicate<TaskInfoInVisitingSegment>() {
                    public boolean apply(TaskInfoInVisitingSegment taskInfoInVisitingSegment) {
                        return taskInfoInVisitingSegment.taskInfo.equals(dependsOnTask);
                    }
                });
            }
        });
        return Collections.max(precedingTaskIndices) + 1;
    }

    // Transitive closure of everything that must run before the finalizer (dependencies,
    // mustRunAfter and shouldRunAfter targets, then mustRunAfter targets of those).
    private Set<TaskInfo> getAllPrecedingTasks(TaskInfo finalizer) {
        Set<TaskInfo> precedingTasks = new HashSet<TaskInfo>();
        Deque<TaskInfo> candidateTasks = new ArrayDeque<TaskInfo>();

        // Consider every task that must run before the finalizer
        candidateTasks.addAll(finalizer.getDependencySuccessors());
        candidateTasks.addAll(finalizer.getMustSuccessors());
        candidateTasks.addAll(finalizer.getShouldSuccessors());

        // For each candidate task, add it to the preceding tasks.
        while (!candidateTasks.isEmpty()) {
            TaskInfo precedingTask = candidateTasks.pop();
            if (precedingTasks.add(precedingTask)) {
                // Any task that the preceding task must run after is also a preceding task.
                candidateTasks.addAll(precedingTask.getMustSuccessors());
            }
        }

        return precedingTasks;
    }

    // Finds the first hard cycle (depends-on / mustRunAfter), renders it, and fails the build.
    private void onOrderingCycle() {
        CachingDirectedGraphWalker<TaskInfo, Void> graphWalker = new CachingDirectedGraphWalker<TaskInfo, Void>(new DirectedGraph<TaskInfo, Void>() {
            public void getNodeValues(TaskInfo node, Collection<? super Void> values, Collection<? super TaskInfo> connectedNodes) {
                connectedNodes.addAll(node.getDependencySuccessors());
                connectedNodes.addAll(node.getMustSuccessors());
            }
        });
        graphWalker.add(entryTasks);
        final List<TaskInfo> firstCycle = new ArrayList<TaskInfo>(graphWalker.findCycles().get(0));
        Collections.sort(firstCycle);

        DirectedGraphRenderer<TaskInfo> graphRenderer = new DirectedGraphRenderer<TaskInfo>(new GraphNodeRenderer<TaskInfo>() {
            public void renderTo(TaskInfo node, StyledTextOutput output) {
                output.withStyle(StyledTextOutput.Style.Identifier).text(node.getTask().getPath());
            }
        }, new DirectedGraph<TaskInfo, Object>() {
            public void getNodeValues(TaskInfo node, Collection<? super Object> values, Collection<? super TaskInfo> connectedNodes) {
                for (TaskInfo dependency : firstCycle) {
                    if (node.getDependencySuccessors().contains(dependency) || node.getMustSuccessors().contains(dependency)) {
                        connectedNodes.add(dependency);
                    }
                }
            }
        });
        StringWriter writer = new StringWriter();
        graphRenderer.renderTo(firstCycle.get(0), writer);
        throw new CircularReferenceException(String.format("Circular dependency between the following tasks:%n%s", writer.toString()));
    }

    // Resets all plan state under the coordination lock. NOT threadsafe with respect to
    // addToTaskGraph() per the class javadoc.
    public void clear() {
        coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() {
            @Override
            public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) {
                graph.clear();
                entryTasks.clear();
                executionPlan.clear();
                executionQueue.clear();
                failures.clear();
                canonicalizedOutputCache.clear();
                runningTasks.clear();
                return FINISHED;
            }
        });
    }

    // Returns the tasks in planned execution order.
    public List<Task> getTasks() {
        return new ArrayList<Task>(executionPlan.keySet());
    }

    public void useFilter(Spec<? super Task> filter) {
        this.filter = filter;
    }

    public void useFailureHandler(TaskFailureHandler handler) {
        this.failureHandler = handler;
    }

    /**
     * Selects the next ready task under the state lock, acquires its project lock plus a child
     * worker lease, then runs taskExecution OUTSIDE the state lock and releases the locks in a
     * finally block. Returns false once all tasks are complete.
     * NOTE(review): selection blocks (RETRY disposition) until a task becomes ready or the plan
     * completes — behavior of withStateLock/RETRY is defined by the coordination service, not
     * visible here.
     */
    @Override
    public boolean executeWithTask(WorkerLease parentWorkerLease, final Action<TaskInfo> taskExecution) {
        final AtomicReference<TaskInfo> selected = new AtomicReference<TaskInfo>();
        final AtomicBoolean canExecute = new AtomicBoolean();
        final ResourceLock workerLease = parentWorkerLease.createChild();
        coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() {
            @Override
            public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) {
                if (cancellationToken.isCancellationRequested()) {
                    if (abortExecution()) {
                        tasksCancelled = true;
                    }
                }

                final Iterator<TaskInfo> iterator = executionQueue.iterator();
                while (iterator.hasNext()) {
                    final TaskInfo taskInfo = iterator.next();
                    if (taskInfo.isReady() && taskInfo.allDependenciesComplete()) {
                        // Nested lock state: lock acquisition and the output-overlap check are
                        // atomic; FAILED rolls this inner attempt back and moves to the next task.
                        coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() {
                            @Override
                            public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) {
                                ResourceLock projectLock = getProjectLock(taskInfo);
                                // TODO: convert output file checks to a resource lock
                                if (projectLock.tryLock() && workerLease.tryLock() && canRunWithWithCurrentlyExecutedTasks(taskInfo)) {
                                    selected.set(taskInfo);
                                    iterator.remove();
                                    if (taskInfo.allDependenciesSuccessful()) {
                                        taskInfo.startExecution();
                                        recordTaskStarted(taskInfo);
                                        canExecute.set(true);
                                    } else {
                                        taskInfo.skipExecution();
                                    }
                                    return FINISHED;
                                } else {
                                    return FAILED;
                                }
                            }
                        });

                        if (selected.get() != null) {
                            break;
                        }
                    }
                }

                if (selected.get() == null && !allTasksComplete()) {
                    return RETRY;
                } else {
                    return FINISHED;
                }
            }
        });

        if (selected.get() != null) {
            try {
                if (canExecute.get()) {
                    taskExecution.execute(selected.get());
                }
            } finally {
                TaskInfo taskInfo = selected.get();
                ResourceLock projectLock = getProjectLock(taskInfo);
                coordinationService.withStateLock(unlock(projectLock, workerLease));
            }
        }

        // If all tasks are complete, we're done
        return !allTasksComplete();
    }

    // Resolves (and caches) the project lock for a task node from its build/project identity paths.
    private ResourceLock getProjectLock(TaskInfo taskInfo) {
        if (projectLocks.containsKey(taskInfo)) {
            return projectLocks.get(taskInfo);
        }

        Project project = taskInfo.getTask().getProject();
        String gradlePath = ((GradleInternal) project.getGradle()).getIdentityPath().toString();
        String projectPath = ((ProjectInternal) project).getIdentityPath().toString();
        ResourceLock projectLock = workerLeaseService.getProjectLock(gradlePath, projectPath);
        projectLocks.put(taskInfo, projectLock);
        return projectLock;
    }

    // A task may run in parallel with the currently running tasks only if none of its output
    // paths overlap with theirs.
    private boolean canRunWithWithCurrentlyExecutedTasks(TaskInfo taskInfo) {
        TaskInternal task = taskInfo.getTask();

        Pair<TaskInternal, String> overlap = firstTaskWithOverlappingOutput(task);
        if (overlap == null) {
            return true;
        } else {
            LOGGER.info("Cannot execute task {} in parallel with task {} due to overlapping output: {}", task.getPath(), overlap.left.getPath(), overlap.right);
        }

        return false;
    }

    // Canonicalized output file paths for a task, cached until the task completes.
    private Set<String> canonicalizedOutputPaths(TaskInternal task) {
        Set<String> paths = canonicalizedOutputCache.get(task);
        if (paths == null) {
            paths = Sets.newHashSet(Iterables.transform(task.getOutputs().getFiles(), new Function<File, String>() {
                @Override
                public String apply(File file) {
                    String path;
                    try {
                        path = file.getCanonicalPath();
                    } catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                    return path;
                }
            }));
            canonicalizedOutputCache.put(task, paths);
        }

        return paths;
    }

    // Returns the first running task whose output overlaps the candidate's, paired with the
    // shorter (ancestor) of the two overlapping paths, or null when there is no overlap.
    @Nullable
    private Pair<TaskInternal, String> firstTaskWithOverlappingOutput(TaskInternal candidateTask) {
        if (runningTasks.isEmpty()) {
            return null;
        }
        for (String candidateTaskOutputPath : canonicalizedOutputPaths(candidateTask)) {
            for (TaskInternal runningTask : runningTasks) {
                for (String runningTaskOutputPath : canonicalizedOutputPaths(runningTask)) {
                    if (pathsOverlap(candidateTaskOutputPath, runningTaskOutputPath)) {
                        return Pair.of(runningTask, TextUtil.shorterOf(candidateTaskOutputPath, runningTaskOutputPath));
                    }
                }
            }
        }
        return null;
    }

    // Two paths overlap when they are equal or one is a directory ancestor of the other.
    private boolean pathsOverlap(String firstPath, String secondPath) {
        if (firstPath.equals(secondPath)) {
            return true;
        }

        String shorter;
        String longer;
        if (firstPath.length() > secondPath.length()) {
            shorter = secondPath;
            longer = firstPath;
        } else {
            shorter = firstPath;
            longer = secondPath;
        }
        // Appending the separator avoids false positives such as "/a/b" vs "/a/bc".
        return longer.startsWith(shorter + StandardSystemProperty.FILE_SEPARATOR.value());
    }

    private void recordTaskStarted(TaskInfo taskInfo) {
        TaskInternal task = taskInfo.getTask();
        runningTasks.add(task);
    }

    private void recordTaskCompleted(TaskInfo taskInfo) {
        TaskInternal task = taskInfo.getTask();
        canonicalizedOutputCache.remove(task);
        runningTasks.remove(task);
    }

    // Marks a task finished under the state lock: promotes its finalizers, records any failure,
    // and releases its bookkeeping.
    public void taskComplete(final TaskInfo taskInfo) {
        coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() {
            @Override
            public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) {
                enforceFinalizerTasks(taskInfo);
                if (taskInfo.isFailed()) {
                    handleFailure(taskInfo);
                }

                taskInfo.finishExecution();
                recordTaskCompleted(taskInfo);
                return FINISHED;
            }
        });
    }

    // Promotes the finalizers of a completed task so they run even if the build is aborting.
    private void enforceFinalizerTasks(TaskInfo taskInfo) {
        for (TaskInfo finalizerNode : taskInfo.getFinalizers()) {
            if (finalizerNode.isRequired() || finalizerNode.isMustNotRun()) {
                enforceWithDependencies(finalizerNode, Sets.<TaskInfo>newHashSet());
            }
        }
    }

    // Forces a node and its transitive dependencies to run (enforceRun), tracking visited nodes
    // to terminate on shared dependencies.
    private void enforceWithDependencies(TaskInfo nodeInfo, Set<TaskInfo> enforcedTasks) {
        Deque<TaskInfo> candidateNodes = new ArrayDeque<TaskInfo>();
        candidateNodes.add(nodeInfo);

        while (!candidateNodes.isEmpty()) {
            TaskInfo node = candidateNodes.pop();
            if (!enforcedTasks.contains(node)) {
                enforcedTasks.add(node);

                candidateNodes.addAll(node.getDependencySuccessors());

                if (node.isMustNotRun() || node.isRequired()) {
                    node.enforceRun();
                }
            }
        }
    }

    // Records a failure: execution failures (infrastructure) always abort; task failures are
    // delegated to the failure handler, which may rethrow (no --continue) or collect.
    private void handleFailure(TaskInfo taskInfo) {
        Throwable executionFailure = taskInfo.getExecutionFailure();
        if (executionFailure != null) {
            // Always abort execution for an execution failure (as opposed to a task failure)
            abortExecution();
            this.failures.add(executionFailure);
            return;
        }

        // Task failure
        try {
            failureHandler.onTaskFailure(taskInfo.getTask());
            this.failures.add(taskInfo.getTaskFailure());
        } catch (Exception e) {
            // If the failure handler rethrows exception, then execution of other tasks is aborted. (--continue will collect failures)
            abortExecution();
            this.failures.add(e);
        }
    }

    private boolean abortExecution() {
        // Allow currently executing and enforced tasks to complete, but skip everything else.
        boolean aborted = false;
        for (TaskInfo taskInfo : executionPlan.values()) {
            if (taskInfo.isRequired()) {
                taskInfo.skipExecution();
                aborted = true;
            }
        }
        return aborted;
    }

    // Blocks (via RETRY) until every task is complete, then rethrows any collected failures.
    public void awaitCompletion() {
        coordinationService.withStateLock(new Transformer<ResourceLockState.Disposition, ResourceLockState>() {
            @Override
            public ResourceLockState.Disposition transform(ResourceLockState resourceLockState) {
                if (allTasksComplete()) {
                    rethrowFailures();
                    return FINISHED;
                } else {
                    return RETRY;
                }
            }
        });
    }

    // Throws BuildCancelledException / MultipleBuildFailures / the single failure, as appropriate.
    private void rethrowFailures() {
        if (tasksCancelled) {
            failures.add(new BuildCancelledException());
        }
        if (failures.isEmpty()) {
            return;
        }

        if (failures.size() > 1) {
            throw new MultipleBuildFailures(failures);
        }

        throw UncheckedException.throwAsUncheckedException(failures.get(0));
    }

    private boolean allTasksComplete() {
        for (TaskInfo taskInfo : executionPlan.values()) {
            if (!taskInfo.isComplete()) {
                return false;
            }
        }
        return true;
    }

    // A walked shouldRunAfter edge, remembered so it can be removed on backtracking.
    private static class GraphEdge {
        private final TaskInfo from;
        private final TaskInfo to;

        private GraphEdge(TaskInfo from, TaskInfo to) {
            this.from = from;
            this.to = to;
        }
    }

    // A node paired with the visiting segment (DFS root) it was queued under.
    private static class TaskInfoInVisitingSegment {
        private final TaskInfo taskInfo;
        private final int visitingSegment;

        private TaskInfoInVisitingSegment(TaskInfo taskInfo, int visitingSegment) {
            this.taskInfo = taskInfo;
            this.visitingSegment = visitingSegment;
        }
    }

    // Default handler: fail fast by rethrowing the task's failure.
    private static class RethrowingFailureHandler implements TaskFailureHandler {
        public void onTaskFailure(Task task) {
            task.getState().rethrowFailure();
        }
    }
}
Move task dependency check after project lock
subprojects/core/src/main/java/org/gradle/execution/taskgraph/DefaultTaskExecutionPlan.java
Move task dependency check after project lock
Java
apache-2.0
2f3092b24d4dbdec2ee8d54085a29e0331096db6
0
blackducksoftware/hub-common,blackducksoftware/hub-common,blackducksoftware/hub-common
/**
 * Hub Common
 *
 * Copyright (C) 2017 Black Duck Software, Inc.
 * http://www.blackducksoftware.com/
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.blackducksoftware.integration.hub.dataservice.license;

import java.util.ArrayList;
import java.util.List;

import com.blackducksoftware.integration.hub.api.component.Component;
import com.blackducksoftware.integration.hub.api.component.ComponentRequestService;
import com.blackducksoftware.integration.hub.api.component.version.ComplexLicense;
import com.blackducksoftware.integration.hub.api.component.version.ComplexLicensePlusMeta;
import com.blackducksoftware.integration.hub.api.component.version.ComponentVersion;
import com.blackducksoftware.integration.hub.api.component.version.License;
import com.blackducksoftware.integration.hub.api.item.MetaService;
import com.blackducksoftware.integration.hub.exception.HubIntegrationException;
import com.blackducksoftware.integration.hub.rest.RestConnection;
import com.blackducksoftware.integration.hub.service.HubRequestService;

/**
 * Data service that resolves a component's license hierarchy from the Hub,
 * decorating each license with the meta link to its license text.
 */
public class LicenseDataService extends HubRequestService {
    private final HubRequestService hubRequestService;

    private final ComponentRequestService componentRequestService;

    private final MetaService metaService;

    public LicenseDataService(final RestConnection restConnection, final HubRequestService hubRequestService,
            final ComponentRequestService componentRequestService, final MetaService metaService) {
        super(restConnection);
        this.hubRequestService = hubRequestService;
        this.componentRequestService = componentRequestService;
        this.metaService = metaService;
    }

    /**
     * Finds the exact component match for the given coordinates and returns its license
     * wrapped together with meta information: each sub-license is paired with the URL of
     * its license text (the "text" link from the license's meta data).
     *
     * @throws HubIntegrationException if any Hub request fails
     */
    public ComplexLicensePlusMeta getComplexLicensePlusMetaFromComponent(final String namespace, final String groupId,
            final String artifactId, final String version) throws HubIntegrationException {
        // Resolve the component and follow its version URL to the version resource.
        final Component matchedComponent = componentRequestService.getExactComponentMatch(namespace, groupId, artifactId, version);
        final String componentVersionUrl = matchedComponent.getVersion();
        final ComponentVersion resolvedVersion = hubRequestService.getItem(componentVersionUrl, ComponentVersion.class);
        final ComplexLicense rootLicense = resolvedVersion.getLicense();

        // Decorate every sub-license with the link to its license text.
        final List<ComplexLicensePlusMeta> decoratedSubLicenses = new ArrayList<>();
        for (final ComplexLicense childLicense : rootLicense.getLicenses()) {
            final License licenseDetail = hubRequestService.getItem(childLicense.getLicense(), License.class);
            final String licenseTextUrl = metaService.getFirstLink(licenseDetail, MetaService.TEXT_LINK);
            final ComplexLicensePlusMeta decorated = new ComplexLicensePlusMeta(childLicense, licenseTextUrl, new ArrayList<ComplexLicensePlusMeta>());
            decoratedSubLicenses.add(decorated);
        }

        return new ComplexLicensePlusMeta(rootLicense, "", decoratedSubLicenses);
    }
}
src/main/java/com/blackducksoftware/integration/hub/dataservice/license/LicenseDataService.java
/**
 * Hub Common
 *
 * Copyright (C) 2017 Black Duck Software, Inc.
 * http://www.blackducksoftware.com/
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.blackducksoftware.integration.hub.dataservice.license;

import java.util.ArrayList;
import java.util.List;

import com.blackducksoftware.integration.hub.api.component.Component;
import com.blackducksoftware.integration.hub.api.component.ComponentRequestService;
import com.blackducksoftware.integration.hub.api.component.version.ComplexLicense;
import com.blackducksoftware.integration.hub.api.component.version.ComplexLicensePlusMeta;
import com.blackducksoftware.integration.hub.api.component.version.ComponentVersion;
import com.blackducksoftware.integration.hub.api.component.version.License;
import com.blackducksoftware.integration.hub.api.item.MetaService;
import com.blackducksoftware.integration.hub.exception.HubIntegrationException;
import com.blackducksoftware.integration.hub.rest.RestConnection;
import com.blackducksoftware.integration.hub.service.HubRequestService;

/**
 * Data service that resolves a component's license hierarchy from the Hub,
 * decorating each license with the meta link to its license text.
 */
public class LicenseDataService extends HubRequestService {
    private final HubRequestService hubRequestService;

    private final ComponentRequestService componentRequestService;

    private final MetaService metaService;

    public LicenseDataService(final RestConnection restConnection, final HubRequestService hubRequestService,
            final ComponentRequestService componentRequestService, final MetaService metaService) {
        super(restConnection);
        this.hubRequestService = hubRequestService;
        this.componentRequestService = componentRequestService;
        this.metaService = metaService;
    }

    /**
     * Finds the exact component match for the given coordinates and returns its license
     * wrapped together with meta information: each sub-license is paired with the URL of
     * its license text (the "text" link from the license's meta data).
     *
     * @throws HubIntegrationException if any Hub request fails
     */
    public ComplexLicensePlusMeta getComplexLicensePlusMetaFromComponent(final String namespace, final String groupId,
            final String artifactId, final String version) throws HubIntegrationException {
        final Component component = componentRequestService.getExactComponentMatch(namespace, groupId, artifactId, version);
        final String versionUrl = component.getVersion();
        final ComponentVersion componentVersion = hubRequestService.getItem(versionUrl, ComponentVersion.class);
        final ComplexLicense parentComplexLicense = componentVersion.getLicense();

        final List<ComplexLicensePlusMeta> subLicensesPlusMeta = new ArrayList<>();
        for (final ComplexLicense subLicense : parentComplexLicense.getLicenses()) {
            final License license = hubRequestService.getItem(subLicense.getLicense(), License.class);
            // Fixed: MetaService.getLink(...) no longer exists; getFirstLink(...) is the
            // replacement for retrieving the "text" link of the license resource.
            final String textUrl = metaService.getFirstLink(license, MetaService.TEXT_LINK);
            subLicensesPlusMeta.add(new ComplexLicensePlusMeta(subLicense, textUrl, new ArrayList<ComplexLicensePlusMeta>()));
        }
        return new ComplexLicensePlusMeta(parentComplexLicense, "", subLicensesPlusMeta);
    }
}
Quick bugfix, resolves issue with use of method that no longer exists.
src/main/java/com/blackducksoftware/integration/hub/dataservice/license/LicenseDataService.java
Quick bugfix, resolves issue with use of method that no longer exists.
Java
apache-2.0
aaff9c4f4a446471557446cf45de5742cb657ac2
0
facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho
// Copyright 2004-present Facebook. All Rights Reserved. package com.facebook.litho.testing.viewtree; public class LevenshteinDistance {
src/debug/java/com/facebook/components/testing/viewtree/LevenshteinDistance.java
// Copyright 2004-present Facebook. All Rights Reserved. package com.facebook.litho.testing.viewtree;
Lines authored by ishtiaq This commit forms part of the blame-preserving initial commit suite.
src/debug/java/com/facebook/components/testing/viewtree/LevenshteinDistance.java
Lines authored by ishtiaq
Java
apache-2.0
aadc807510bcc0a01df6dfedaaeed49bb5379eed
0
wcm-io-devops/maven-eclipse-plugin,wcm-io-devops/maven-eclipse-plugin,wcm-io-devops/maven-eclipse-plugin
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law * or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.apache.maven.plugin.eclipse.writers; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.jar.Attributes; import java.util.jar.Manifest; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.eclipse.Constants; import org.apache.maven.plugin.eclipse.EclipseSourceDir; import org.apache.maven.plugin.eclipse.Messages; import org.apache.maven.plugin.eclipse.writers.wtp.AbstractWtpResourceWriter; import org.apache.maven.plugin.ide.IdeDependency; import org.apache.maven.plugin.ide.IdeUtils; import org.apache.maven.plugin.ide.JeeUtils; import org.apache.maven.plugin.logging.Log; import org.apache.maven.project.MavenProject; /** * Create or adapt the manifest files for the RAD6 runtime dependencys. attention these will not be used for the real * ear these are just to get the runtime enviorment using the maven dependencies. 
WARNING: The manifest resources added * here will not have the benefit of the dependencies of the project, since that's not provided in the setup() apis, one * of the locations from which this writer is used in the RadPlugin. * * @author <a href="mailto:[email protected]">Richard van Nieuwenhoven </a> */ public class EclipseManifestWriter extends AbstractEclipseWriter { private static final String MANIFEST_MF_FILENAME = "MANIFEST.MF"; private static final String META_INF_DIRECTORY = "META-INF"; private static final String GENERATED_RESOURCE_DIRNAME = "target" + File.separatorChar + "generated-resources" + File.separatorChar + "eclipse"; private static final String WEBAPP_RESOURCE_DIR = "src" + File.separatorChar + "main" + File.separatorChar + "webapp"; /** * Returns absolute path to the web content directory based on configuration of the war plugin or default one * otherwise. * * @param project * @return absolute directory path as String * @throws MojoExecutionException */ private static String getWebContentBaseDirectory( EclipseWriterConfig config ) throws MojoExecutionException { // getting true location of web source dir from config File warSourceDirectory = new File( IdeUtils.getPluginSetting( config.getProject(), JeeUtils.ARTIFACT_MAVEN_WAR_PLUGIN, "warSourceDirectory", WEBAPP_RESOURCE_DIR ) ); // getting real and correct path to the web source dir String webContentDir = IdeUtils.toRelativeAndFixSeparator( config.getEclipseProjectDirectory(), warSourceDirectory, false ); // getting the path to meta-inf base dir String result = config.getProject().getBasedir().getAbsolutePath() + File.separatorChar + webContentDir; return result; } /** * Search the project for the existing META-INF directory where the manifest should be located. 
* * @return the apsolute path to the META-INF directory * @throws MojoExecutionException */ public String getMetaInfBaseDirectory( MavenProject project ) throws MojoExecutionException { String metaInfBaseDirectory = null; if ( this.config.getProject().getPackaging().equals( Constants.PROJECT_PACKAGING_WAR ) ) { // getting the path to meta-inf base dir metaInfBaseDirectory = getWebContentBaseDirectory( this.config ); this.log.debug( "Attempting to use: " + metaInfBaseDirectory + " for location of META-INF in war project." ); File metaInfDirectoryFile = new File( metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.META_INF_DIRECTORY ); if ( metaInfDirectoryFile.exists() && !metaInfDirectoryFile.isDirectory() ) { metaInfBaseDirectory = null; } } for ( int index = this.config.getSourceDirs().length - 1; metaInfBaseDirectory == null && index >= 0; index-- ) { File manifestFile = new File( this.config.getEclipseProjectDirectory(), this.config.getSourceDirs()[index].getPath() + File.separatorChar + EclipseManifestWriter.META_INF_DIRECTORY + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME ); this.log.debug( "Checking for existence of META-INF/MANIFEST.MF file: " + manifestFile ); if ( manifestFile.exists() ) { metaInfBaseDirectory = manifestFile.getParentFile().getParent(); } } return metaInfBaseDirectory; } /** * Write the manifest files use an existing one it it exists (it will be overwritten!! 
in a war use webapp/META-INF * else use the generated rad6 sourcefolder * * @see AbstractWtpResourceWriter#write(EclipseSourceDir[], ArtifactRepository, File) * @param sourceDirs all eclipse source directorys * @param localRepository the local reposetory * @param buildOutputDirectory build output directory (target) * @throws MojoExecutionException when writing the config files was not possible */ public void write() throws MojoExecutionException { String metaInfBaseDirectory = getMetaInfBaseDirectory( this.config.getProject() ); if ( metaInfBaseDirectory == null ) { // TODO: if this really is an error, shouldn't we stop the build?? throw new MojoExecutionException( Messages.getString( "EclipseCleanMojo.nofilefound", new Object[] { EclipseManifestWriter.META_INF_DIRECTORY } ) ); } // if // (this.config.getEclipseProjectName().equals(IdeUtils.getProjectName(IdeUtils.PROJECT_NAME_WITH_VERSION_TEMPLATE, // this.config.getProject()))) { // MavenArchiver mavenArchiver = new MavenArchiver(); // ManifestConfiguration configuration = new ManifestConfiguration() { // // public boolean isAddClasspath() { // return true; // } // }; // // File manifestFile = new File(metaInfBaseDirectory + File.separatorChar + // EclipseManifestWriter.META_INF_DIRECTORY + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME); // manifestFile.getParentFile().mkdirs(); // // try { // PrintWriter printwriter = new PrintWriter(manifestFile); // mavenArchiver.getManifest(this.config.getProject(), configuration).write(printwriter); // printwriter.close(); // } catch (Exception e) { // this.log.error(Messages.getString("EclipsePlugin.cantwritetofile", new Object[]{ // metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME // })); // } // } else { Manifest manifest = createNewManifest(); File manifestFile = new File( metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.META_INF_DIRECTORY + File.separatorChar + 
EclipseManifestWriter.MANIFEST_MF_FILENAME ); log.info( "MANIFEST LOCATION: " + manifestFile ); if ( shouldNewManifestFileBeWritten( manifest, manifestFile ) ) { log.info( "Writing manifest..." ); manifestFile.getParentFile().mkdirs(); try { FileOutputStream stream = new FileOutputStream( manifestFile ); manifest.write( stream ); stream.close(); } catch ( Exception e ) { this.log.error( Messages.getString( "EclipsePlugin.cantwritetofile", new Object[] { metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME } ) ); } // } } } /** * make room for a Manifest file. use a generated resource for JARS and for WARS use the manifest in the * webapp/META-INF directory. * * @throws MojoExecutionException */ public static void addManifestResource( Log log, EclipseWriterConfig config ) throws MojoExecutionException { EclipseManifestWriter manifestWriter = new EclipseManifestWriter(); manifestWriter.init( log, config ); String packaging = config.getProject().getPackaging(); String manifestDirectory = manifestWriter.getMetaInfBaseDirectory( config.getProject() ); if ( !Constants.PROJECT_PACKAGING_EAR.equals( packaging ) && !Constants.PROJECT_PACKAGING_WAR.equals( packaging ) && manifestDirectory == null ) { String generatedResourceDir = config.getProject().getBasedir().getAbsolutePath() + File.separatorChar + EclipseManifestWriter.GENERATED_RESOURCE_DIRNAME; manifestDirectory = generatedResourceDir + File.separatorChar + "META-INF"; try { new File( manifestDirectory ).mkdirs(); File manifestFile = new File( manifestDirectory + File.separatorChar + "MANIFEST.MF" ); if ( manifestFile.exists() ) { manifestFile.delete(); } manifestFile.createNewFile(); } catch ( IOException e ) { log.error( Messages.getString( "EclipsePlugin.cantwritetofile", new Object[] { manifestDirectory + File.separatorChar + "META-INF" + File.separatorChar + "MANIFEST.MF" } ) ); } log.debug( "Adding " + EclipseManifestWriter.GENERATED_RESOURCE_DIRNAME + " to eclipse sources " ); 
EclipseSourceDir[] sourceDirs = config.getSourceDirs(); EclipseSourceDir[] newSourceDirs = new EclipseSourceDir[sourceDirs.length + 1]; System.arraycopy( sourceDirs, 0, newSourceDirs, 0, sourceDirs.length ); newSourceDirs[sourceDirs.length] = new EclipseSourceDir( EclipseManifestWriter.GENERATED_RESOURCE_DIRNAME, null, true, false, null, null, false ); config.setSourceDirs( newSourceDirs ); } if ( Constants.PROJECT_PACKAGING_WAR.equals( packaging ) ) { new File( getWebContentBaseDirectory( config ) + File.separatorChar + "META-INF" ).mkdirs(); } // special case must be done first because it can add stuff to the // classpath that will be // written by the superclass manifestWriter.write(); } /** * Add one dependency to the black separated classpath stringbuffer. When the project is available in the reactor * (current build) then the project is used else the jar representing the artifact. System dependencies will only be * included if they are in this project. * * @param classpath existing classpath to append * @param dependency dependency to append as jar or as project */ private void addDependencyToClassPath( StringBuffer classpath, IdeDependency dependency ) { if ( !dependency.isTestDependency() && !dependency.isProvided() && !dependency.isSystemScopedOutsideProject( this.config.getProject() ) ) { // blank is the separator in manifest classpath's if ( classpath.length() != 0 ) { classpath.append( ' ' ); } // if the dependency is a workspace project add the project and not // the jar if ( !dependency.isReferencedProject() ) { classpath.append( dependency.getFile().getName() ); } else { classpath.append( dependency.getEclipseProjectName() + ".jar" ); } } } /** * Check if the two manifests are equal. Manifest.equal can not be used because of the special case the Classpath * entr, witch must be comaired sorted so that a different oder in the classpath does not result in "not equal". 
* This not not realy correct but in this case it is more important to reduce the number of version-controll files. * * @param manifest the new manifest * @param existingManifest to compaire the new one with * @return are the manifests equal */ private boolean areManifestsEqual( Manifest manifest, Manifest existingManifest ) { if ( existingManifest == null ) { return false; } Set keys = new HashSet(); Attributes existingMap = existingManifest.getMainAttributes(); Attributes newMap = manifest.getMainAttributes(); keys.addAll( existingMap.keySet() ); keys.addAll( newMap.keySet() ); Iterator iterator = keys.iterator(); while ( iterator.hasNext() ) { Attributes.Name key = (Attributes.Name) iterator.next(); String newValue = (String) newMap.get( key ); String existingValue = (String) existingMap.get( key ); // special case classpath... they are qual when there entries // are equal if ( Attributes.Name.CLASS_PATH.equals( key ) ) { newValue = orderClasspath( newValue ); existingValue = orderClasspath( existingValue ); } if ( ( newValue == null || !newValue.equals( existingValue ) ) && ( existingValue == null || !existingValue.equals( newValue ) ) ) { return false; } } return true; } /** * Convert all dependencies in a blank seperated list of jars and projects representing the classpath. * * @return the blank separeted classpath string */ private String constructManifestClasspath() { StringBuffer stringBuffer = new StringBuffer(); IdeDependency[] deps = this.config.getDepsOrdered(); for ( int index = 0; index < deps.length; index++ ) { addDependencyToClassPath( stringBuffer, deps[index] ); } return stringBuffer.toString(); } /** * Create a manifest contaigning the required classpath. 
* * @return the newly created manifest */ private Manifest createNewManifest() { Manifest manifest = new Manifest(); manifest.getMainAttributes().put( Attributes.Name.MANIFEST_VERSION, "1.0" ); manifest.getMainAttributes().put( Attributes.Name.CLASS_PATH, constructManifestClasspath() ); return manifest; } /** * Aphabeticaly sort the classpath. Do this by splitting it up, sort the entries and gleue them together again. * * @param newValue classpath to sort * @return the sorted classpath */ private String orderClasspath( String newValue ) { if ( newValue == null ) { return null; } String[] entries = newValue.split( " " ); Arrays.sort( entries ); StringBuffer buffer = new StringBuffer( newValue.length() ); for ( int index = 0; index < entries.length; index++ ) { buffer.append( entries[index] ); buffer.append( ' ' ); } return buffer.toString(); } /** * Read and parse the existing manifest file. * * @param manifestFile file * @return the read manifest * @throws IOException if the file could not be read */ private Manifest readExistingManifest( File manifestFile ) throws IOException { if ( !manifestFile.exists() ) { return null; } Manifest existingManifest = new Manifest(); FileInputStream inputStream = new FileInputStream( manifestFile ); existingManifest.read( inputStream ); inputStream.close(); return existingManifest; } /** * Verify is the manifest sould be overwritten this sould take in account that the manifest should only be written * if the contents of the classpath was changed not the order. The classpath sorting oder should be ignored. 
* * @param manifest the newly created classpath * @param manifestFile the file where the manifest * @return if the new manifest file must be written * @throws MojoExecutionException */ private boolean shouldNewManifestFileBeWritten( Manifest manifest, File manifestFile ) throws MojoExecutionException { try { Manifest existingManifest = readExistingManifest( manifestFile ); if ( areManifestsEqual( manifest, existingManifest ) ) { this.log.info( Messages.getString( "EclipseCleanMojo.unchanged", manifestFile.getAbsolutePath() ) ); return false; } } catch ( Exception e ) { throw new MojoExecutionException( Messages.getString( "EclipseCleanMojo.nofilefound", manifestFile.getAbsolutePath() ), e ); } return true; } }
src/main/java/org/apache/maven/plugin/eclipse/writers/EclipseManifestWriter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law * or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.apache.maven.plugin.eclipse.writers; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.jar.Attributes; import java.util.jar.Manifest; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.eclipse.Constants; import org.apache.maven.plugin.eclipse.EclipseSourceDir; import org.apache.maven.plugin.eclipse.Messages; import org.apache.maven.plugin.eclipse.writers.wtp.AbstractWtpResourceWriter; import org.apache.maven.plugin.ide.IdeDependency; import org.apache.maven.plugin.ide.IdeUtils; import org.apache.maven.plugin.ide.JeeUtils; import org.apache.maven.plugin.logging.Log; import org.apache.maven.project.MavenProject; /** * Create or adapt the manifest files for the RAD6 runtime dependencys. attention these will not be used for the real * ear these are just to get the runtime enviorment using the maven dependencies. 
WARNING: The manifest resources added * here will not have the benefit of the dependencies of the project, since that's not provided in the setup() apis, one * of the locations from which this writer is used in the RadPlugin. * * @author <a href="mailto:[email protected]">Richard van Nieuwenhoven </a> */ public class EclipseManifestWriter extends AbstractEclipseWriter { private static final String MANIFEST_MF_FILENAME = "MANIFEST.MF"; private static final String META_INF_DIRECTORY = "META-INF"; private static final String GENERATED_RESOURCE_DIRNAME = "target" + File.separatorChar + "generated-resources" + File.separatorChar + "eclipse"; private static final String WEBAPP_RESOURCE_DIR = "src" + File.separatorChar + "main" + File.separatorChar + "webapp"; /** * Returns absolute path to the web content directory based on configuration of the war plugin or default one * otherwise. * * @param project * @return absolute directory path as String * @throws MojoExecutionException */ private static String getWebContentBaseDirectory( EclipseWriterConfig config ) throws MojoExecutionException { // getting true location of web source dir from config File warSourceDirectory = new File( IdeUtils.getPluginSetting( config.getProject(), JeeUtils.ARTIFACT_MAVEN_WAR_PLUGIN, "warSourceDirectory", WEBAPP_RESOURCE_DIR ) ); // getting real and correct path to the web source dir String webContentDir = IdeUtils.toRelativeAndFixSeparator( config.getEclipseProjectDirectory(), warSourceDirectory, false ); // getting the path to meta-inf base dir String result = config.getProject().getBasedir().getAbsolutePath() + File.separatorChar + webContentDir; return result; } /** * Search the project for the existing META-INF directory where the manifest should be located. 
* * @return the apsolute path to the META-INF directory * @throws MojoExecutionException */ public String getMetaInfBaseDirectory( MavenProject project ) throws MojoExecutionException { String metaInfBaseDirectory = null; if ( this.config.getProject().getPackaging().equals( Constants.PROJECT_PACKAGING_WAR ) ) { // getting the path to meta-inf base dir metaInfBaseDirectory = getWebContentBaseDirectory( this.config ); this.log.debug( "Attempting to use: " + metaInfBaseDirectory + " for location of META-INF in war project." ); File metaInfDirectoryFile = new File( metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.META_INF_DIRECTORY ); if ( metaInfDirectoryFile.exists() && !metaInfDirectoryFile.isDirectory() ) { metaInfBaseDirectory = null; } } for ( int index = this.config.getSourceDirs().length - 1; metaInfBaseDirectory == null && index >= 0; index-- ) { File manifestFile = new File( this.config.getEclipseProjectDirectory(), this.config.getSourceDirs()[index].getPath() + File.separatorChar + EclipseManifestWriter.META_INF_DIRECTORY + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME ); this.log.debug( "Checking for existence of META-INF/MANIFEST.MF file: " + manifestFile ); if ( manifestFile.exists() ) { metaInfBaseDirectory = manifestFile.getParentFile().getParent(); } } return metaInfBaseDirectory; } /** * Write the manifest files use an existing one it it exists (it will be overwritten!! 
in a war use webapp/META-INF * else use the generated rad6 sourcefolder * * @see AbstractWtpResourceWriter#write(EclipseSourceDir[], ArtifactRepository, File) * @param sourceDirs all eclipse source directorys * @param localRepository the local reposetory * @param buildOutputDirectory build output directory (target) * @throws MojoExecutionException when writing the config files was not possible */ public void write() throws MojoExecutionException { String metaInfBaseDirectory = getMetaInfBaseDirectory( this.config.getProject() ); if ( metaInfBaseDirectory == null ) { // TODO: if this really is an error, shouldn't we stop the build?? throw new MojoExecutionException( Messages.getString( "EclipseCleanMojo.nofilefound", new Object[] { EclipseManifestWriter.META_INF_DIRECTORY } ) ); } // if // (this.config.getEclipseProjectName().equals(IdeUtils.getProjectName(IdeUtils.PROJECT_NAME_WITH_VERSION_TEMPLATE, // this.config.getProject()))) { // MavenArchiver mavenArchiver = new MavenArchiver(); // ManifestConfiguration configuration = new ManifestConfiguration() { // // public boolean isAddClasspath() { // return true; // } // }; // // File manifestFile = new File(metaInfBaseDirectory + File.separatorChar + // EclipseManifestWriter.META_INF_DIRECTORY + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME); // manifestFile.getParentFile().mkdirs(); // // try { // PrintWriter printwriter = new PrintWriter(manifestFile); // mavenArchiver.getManifest(this.config.getProject(), configuration).write(printwriter); // printwriter.close(); // } catch (Exception e) { // this.log.error(Messages.getString("EclipsePlugin.cantwritetofile", new Object[]{ // metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME // })); // } // } else { Manifest manifest = createNewManifest(); File manifestFile = new File( metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.META_INF_DIRECTORY + File.separatorChar + 
EclipseManifestWriter.MANIFEST_MF_FILENAME ); System.out.println( "MANIFEST LOCATION: " + manifestFile ); if ( shouldNewManifestFileBeWritten( manifest, manifestFile ) ) { System.out.println( "Writing manifest..." ); manifestFile.getParentFile().mkdirs(); try { FileOutputStream stream = new FileOutputStream( manifestFile ); manifest.write( stream ); stream.close(); } catch ( Exception e ) { this.log.error( Messages.getString( "EclipsePlugin.cantwritetofile", new Object[] { metaInfBaseDirectory + File.separatorChar + EclipseManifestWriter.MANIFEST_MF_FILENAME } ) ); } // } } } /** * make room for a Manifest file. use a generated resource for JARS and for WARS use the manifest in the * webapp/META-INF directory. * * @throws MojoExecutionException */ public static void addManifestResource( Log log, EclipseWriterConfig config ) throws MojoExecutionException { EclipseManifestWriter manifestWriter = new EclipseManifestWriter(); manifestWriter.init( log, config ); String packaging = config.getProject().getPackaging(); String manifestDirectory = manifestWriter.getMetaInfBaseDirectory( config.getProject() ); if ( !Constants.PROJECT_PACKAGING_EAR.equals( packaging ) && !Constants.PROJECT_PACKAGING_WAR.equals( packaging ) && manifestDirectory == null ) { String generatedResourceDir = config.getProject().getBasedir().getAbsolutePath() + File.separatorChar + EclipseManifestWriter.GENERATED_RESOURCE_DIRNAME; manifestDirectory = generatedResourceDir + File.separatorChar + "META-INF"; try { new File( manifestDirectory ).mkdirs(); File manifestFile = new File( manifestDirectory + File.separatorChar + "MANIFEST.MF" ); if ( manifestFile.exists() ) { manifestFile.delete(); } manifestFile.createNewFile(); } catch ( IOException e ) { log.error( Messages.getString( "EclipsePlugin.cantwritetofile", new Object[] { manifestDirectory + File.separatorChar + "META-INF" + File.separatorChar + "MANIFEST.MF" } ) ); } log.debug( "Adding " + EclipseManifestWriter.GENERATED_RESOURCE_DIRNAME + " to 
eclipse sources " ); EclipseSourceDir[] sourceDirs = config.getSourceDirs(); EclipseSourceDir[] newSourceDirs = new EclipseSourceDir[sourceDirs.length + 1]; System.arraycopy( sourceDirs, 0, newSourceDirs, 0, sourceDirs.length ); newSourceDirs[sourceDirs.length] = new EclipseSourceDir( EclipseManifestWriter.GENERATED_RESOURCE_DIRNAME, null, true, false, null, null, false ); config.setSourceDirs( newSourceDirs ); } if ( Constants.PROJECT_PACKAGING_WAR.equals( packaging ) ) { new File( getWebContentBaseDirectory( config ) + File.separatorChar + "META-INF" ).mkdirs(); } // special case must be done first because it can add stuff to the // classpath that will be // written by the superclass manifestWriter.write(); } /** * Add one dependency to the black separated classpath stringbuffer. When the project is available in the reactor * (current build) then the project is used else the jar representing the artifact. System dependencies will only be * included if they are in this project. * * @param classpath existing classpath to append * @param dependency dependency to append as jar or as project */ private void addDependencyToClassPath( StringBuffer classpath, IdeDependency dependency ) { if ( !dependency.isTestDependency() && !dependency.isProvided() && !dependency.isSystemScopedOutsideProject( this.config.getProject() ) ) { // blank is the separator in manifest classpath's if ( classpath.length() != 0 ) { classpath.append( ' ' ); } // if the dependency is a workspace project add the project and not // the jar if ( !dependency.isReferencedProject() ) { classpath.append( dependency.getFile().getName() ); } else { classpath.append( dependency.getEclipseProjectName() + ".jar" ); } } } /** * Check if the two manifests are equal. Manifest.equal can not be used because of the special case the Classpath * entr, witch must be comaired sorted so that a different oder in the classpath does not result in "not equal". 
* This not not realy correct but in this case it is more important to reduce the number of version-controll files. * * @param manifest the new manifest * @param existingManifest to compaire the new one with * @return are the manifests equal */ private boolean areManifestsEqual( Manifest manifest, Manifest existingManifest ) { if ( existingManifest == null ) { return false; } Set keys = new HashSet(); Attributes existingMap = existingManifest.getMainAttributes(); Attributes newMap = manifest.getMainAttributes(); keys.addAll( existingMap.keySet() ); keys.addAll( newMap.keySet() ); Iterator iterator = keys.iterator(); while ( iterator.hasNext() ) { Attributes.Name key = (Attributes.Name) iterator.next(); String newValue = (String) newMap.get( key ); String existingValue = (String) existingMap.get( key ); // special case classpath... they are qual when there entries // are equal if ( Attributes.Name.CLASS_PATH.equals( key ) ) { newValue = orderClasspath( newValue ); existingValue = orderClasspath( existingValue ); } if ( ( newValue == null || !newValue.equals( existingValue ) ) && ( existingValue == null || !existingValue.equals( newValue ) ) ) { return false; } } return true; } /** * Convert all dependencies in a blank seperated list of jars and projects representing the classpath. * * @return the blank separeted classpath string */ private String constructManifestClasspath() { StringBuffer stringBuffer = new StringBuffer(); IdeDependency[] deps = this.config.getDepsOrdered(); for ( int index = 0; index < deps.length; index++ ) { addDependencyToClassPath( stringBuffer, deps[index] ); } return stringBuffer.toString(); } /** * Create a manifest contaigning the required classpath. 
* * @return the newly created manifest */ private Manifest createNewManifest() { Manifest manifest = new Manifest(); manifest.getMainAttributes().put( Attributes.Name.MANIFEST_VERSION, "1.0" ); manifest.getMainAttributes().put( Attributes.Name.CLASS_PATH, constructManifestClasspath() ); return manifest; } /** * Aphabeticaly sort the classpath. Do this by splitting it up, sort the entries and gleue them together again. * * @param newValue classpath to sort * @return the sorted classpath */ private String orderClasspath( String newValue ) { if ( newValue == null ) { return null; } String[] entries = newValue.split( " " ); Arrays.sort( entries ); StringBuffer buffer = new StringBuffer( newValue.length() ); for ( int index = 0; index < entries.length; index++ ) { buffer.append( entries[index] ); buffer.append( ' ' ); } return buffer.toString(); } /** * Read and parse the existing manifest file. * * @param manifestFile file * @return the read manifest * @throws IOException if the file could not be read */ private Manifest readExistingManifest( File manifestFile ) throws IOException { if ( !manifestFile.exists() ) { return null; } Manifest existingManifest = new Manifest(); FileInputStream inputStream = new FileInputStream( manifestFile ); existingManifest.read( inputStream ); inputStream.close(); return existingManifest; } /** * Verify is the manifest sould be overwritten this sould take in account that the manifest should only be written * if the contents of the classpath was changed not the order. The classpath sorting oder should be ignored. 
* * @param manifest the newly created classpath * @param manifestFile the file where the manifest * @return if the new manifest file must be written * @throws MojoExecutionException */ private boolean shouldNewManifestFileBeWritten( Manifest manifest, File manifestFile ) throws MojoExecutionException { try { Manifest existingManifest = readExistingManifest( manifestFile ); if ( areManifestsEqual( manifest, existingManifest ) ) { this.log.info( Messages.getString( "EclipseCleanMojo.unchanged", manifestFile.getAbsolutePath() ) ); return false; } } catch ( Exception e ) { throw new MojoExecutionException( Messages.getString( "EclipseCleanMojo.nofilefound", manifestFile.getAbsolutePath() ), e ); } return true; } }
replace some System.out git-svn-id: 6038db50b076e48c7926ed71fd94f8e91be2fbc9@627727 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/maven/plugin/eclipse/writers/EclipseManifestWriter.java
replace some System.out
Java
apache-2.0
fc2adccf2da155d719c813a8dacf44dafcf89b59
0
inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service
/*
 * Copyright 2012-2013 inBloom, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.slc.sli.api.resources.security;

import java.net.URI;
import java.util.*;

import javax.annotation.PostConstruct;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;

import com.google.common.collect.Sets;
import org.slc.sli.api.security.SLIPrincipal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Scope;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;

import org.slc.sli.api.config.EntityDefinition;
import org.slc.sli.api.config.EntityDefinitionStore;
import org.slc.sli.api.representation.EntityBody;
import org.slc.sli.api.resources.v1.HypermediaType;
import org.slc.sli.api.security.RightsAllowed;
import org.slc.sli.api.security.SecurityEventBuilder;
import org.slc.sli.api.security.context.resolver.EdOrgHelper;
import org.slc.sli.api.service.EntityService;
import org.slc.sli.api.util.SecurityUtil;
import org.slc.sli.common.util.logging.SecurityEvent;
import org.slc.sli.domain.Entity;
import org.slc.sli.domain.NeutralCriteria;
import org.slc.sli.domain.NeutralQuery;
import org.slc.sli.domain.Repository;
import org.slc.sli.domain.enums.Right;

/**
 *
 * App auths are stored in mongo in the format
 *
 * {
 *   applicationId: id of application from application collection,
 *   edorgs: ids of all the edorgs (schools, LEAs, and SEAs) that have authorized the application.
 * }
 *
 * The endpoint supports three operations
 *
 * GET /applicationAuthorization
 * GET /applicationAuthorization/id
 * PUT /applicationAuthorization/id
 *
 * On a GET, it returns data of the format
 * {
 *   appId: id of the application
 *   authorized: true|false
 * }
 *
 * For LEA administrators the content is based on the user's LEA.
 * For SEA administrators the content is based on delegated SEAs.
 *
 * If an SEA administrator needs to distinguish between two edorgs, a
 * ?edorgs=... query parameter can be used on all operations.
 *
 * On a PUT, the endpoint automatically registers parent and child edorgs.
 */
@Component
@Scope("request")
@Path("/applicationAuthorization")
@Produces({ HypermediaType.JSON + ";charset=utf-8" })
public class ApplicationAuthorizationResource {

    @Autowired
    private EntityDefinitionStore store;

    @Autowired
    @Qualifier("validationRepo")
    Repository<Entity> repo;

    @Autowired
    private EdOrgHelper helper;

    @Autowired
    private DelegationUtil delegation;

    // Resolved from the EntityDefinitionStore in init(); backs all appauth reads/writes.
    private EntityService service;

    @Autowired
    private SecurityEventBuilder securityEventBuilder;

    @Context
    UriInfo uri;

    public static final String RESOURCE_NAME = "applicationAuthorization";
    public static final String APP_ID = "applicationId";
    public static final String EDORG_IDS = "edorgs";

    /**
     * Looks up the applicationAuthorization entity definition and caches its service.
     */
    @PostConstruct
    public void init() {
        EntityDefinition def = store.lookupByResourceName(RESOURCE_NAME);
        service = def.getService();
    }

    /**
     * Returns the authorization status of a single application for the requested edorg.
     *
     * @param appId id of the application being queried
     * @param edorg optional edorg override; defaults to the caller's edorg
     * @return 404 if the application does not exist; otherwise 200 with
     *         {id, appId, authorized, edorgs}
     */
    @GET
    @Path("{appId}")
    @RightsAllowed({Right.EDORG_APP_AUTHZ, Right.EDORG_DELEGATE })
    public Response getAuthorization(@PathParam("appId") String appId, @QueryParam("edorg") String edorg) {
        String myEdorg = validateEdOrg(edorg);
        EntityBody appAuth = getAppAuth(appId);
        if (appAuth == null) {
            // No appauth record: distinguish an unknown app (404) from a known
            // but never-authorized one (200 with authorized=false).
            Entity appEntity = repo.findOne("application", new NeutralQuery(new NeutralCriteria("_id", "=", appId)));
            if (appEntity == null) {
                return Response.status(Status.NOT_FOUND).build();
            } else {
                HashMap<String, Object> entity = new HashMap<String, Object>();
                entity.put("id", appId);
                entity.put("appId", appId);
                entity.put("authorized", false);
                entity.put("edorgs", Collections.emptyList()); //(TA10857)
                return Response.status(Status.OK).entity(entity).build();
            }
        } else {
            HashMap<String, Object> entity = new HashMap<String, Object>();
            entity.put("appId", appId);
            entity.put("id", appId);
            List<Map<String, Object>> edOrgs = (List<Map<String, Object>>) appAuth.get("edorgs");
            entity.put("authorized", containsEdOrg(edOrgs, myEdorg));
            entity.put("edorgs", edOrgs); //(TA10857)
            return Response.status(Status.OK).entity(entity).build();
        }
    }

    /**
     * Returns true if any entry in the stored edorg list has "authorizedEdorg" == edOrg.
     * Null-safe on both arguments.
     */
    private boolean containsEdOrg(List<Map<String, Object>> edOrgList, String edOrg) {
        if (edOrgList == null || edOrg == null) {
            return false;
        }
        for (Map<String, Object> edOrgListElement : edOrgList) {
            // equals() on a possibly-null map value is safe: edOrg is known non-null here.
            if (edOrg.equals(edOrgListElement.get("authorizedEdorg"))) {
                return true;
            }
        }
        return false;
    }

    /**
     * Fetches the (at most one) appauth record for the given application id,
     * or null if none exists.
     */
    private EntityBody getAppAuth(String appId) {
        Iterable<EntityBody> appAuths = service.list(new NeutralQuery(new NeutralCriteria("applicationId", "=", appId)));
        for (EntityBody auth : appAuths) {
            return auth;
        }
        return null;
    }

    /**
     * Grants or revokes edorg authorizations for an application.
     *
     * The request body must contain "authorized" (boolean) and may contain
     * "edorgs" (list of edorg ids to grant/revoke). //(TA10857)
     *
     * @return 400 if "authorized" is missing, 404 if the app does not exist,
     *         204 on success
     */
    @PUT
    @Path("{appId}")
    @RightsAllowed({Right.EDORG_APP_AUTHZ, Right.EDORG_DELEGATE })
    public Response updateAuthorization(@PathParam("appId") String appId, EntityBody auth) {
        if (!auth.containsKey("authorized")) {
            return Response.status(Status.BAD_REQUEST).build();
        }

        // Normalize the requested edorg list once; reused below so a missing
        // "edorgs" key can never reach modifyEdOrgList as null (NPE fix).
        List<String> edOrgsToAuthorize = (List<String>) auth.get("edorgs"); //(TA10857)
        if (edOrgsToAuthorize == null) {
            edOrgsToAuthorize = Collections.emptyList();
        }

        EntityBody existingAuth = getAppAuth(appId);
        if (existingAuth == null) {
            // See if this is an actual app.
            Entity appEntity = repo.findOne("application", new NeutralQuery(new NeutralCriteria("_id", "=", appId)));
            if (appEntity == null) {
                return Response.status(Status.NOT_FOUND).build();
            } else {
                if (((Boolean) auth.get("authorized")).booleanValue()) {
                    // Being set to true; if false, there's no work to be done.
                    // We don't have an appauth entry for this app, so create one.
                    EntityBody body = new EntityBody();
                    body.put("applicationId", appId);
                    body.put("edorgs", enrichAuthorizedEdOrgsList(edOrgsToAuthorize));
                    service.create(body);
                    logSecurityEvent(appId, null, edOrgsToAuthorize);
                }
                return Response.status(Status.NO_CONTENT).build();
            }
        } else {
            List<Map<String, Object>> oldEdOrgs = (List<Map<String, Object>>) existingAuth.get("edorgs");
            Set<String> oldAuth = getSetOfAuthorizedIds(oldEdOrgs);
            List<String> newAuth = edOrgsToAuthorize;
            logSecurityEvent(appId, oldAuth, newAuth);

            boolean addOrRemove = ((Boolean) auth.get("authorized")).booleanValue();
            List<Map<String, Object>> modifiedAuthList = modifyEdOrgList(oldEdOrgs, addOrRemove, newAuth);
            existingAuth.put("edorgs", modifiedAuthList);
            service.update((String) existingAuth.get("id"), existingAuth);
            return Response.status(Status.NO_CONTENT).build();
        }
    }

    /**
     * Extracts the set of "authorizedEdorg" ids from a stored appauth edorg list.
     * Returns an empty set for a null list.
     */
    private Set<String> getSetOfAuthorizedIds(List<Map<String, Object>> currentAuthList) {
        Set<String> authSet = new HashSet<String>();
        if (currentAuthList != null) {
            for (Map<String, Object> currentAuthListItem : currentAuthList) {
                authSet.add((String) currentAuthListItem.get("authorizedEdorg"));
            }
        }
        return authSet;
    }

    /**
     * Mutates the stored authorization list: when add is true, appends enriched
     * entries for edorgs not already present; otherwise removes entries whose
     * "authorizedEdorg" appears in newEdOrgList.
     *
     * @param currentAuthList stored list (may be null; treated as empty)
     * @param add true to grant, false to revoke
     * @param newEdOrgList edorg ids being granted or revoked
     * @return the updated list (same instance as currentAuthList when non-null)
     */
    private List<Map<String, Object>> modifyEdOrgList(List<Map<String, Object>> currentAuthList, boolean add,
            List<String> newEdOrgList) {
        // Work on a local so the parameter is never reassigned.
        List<Map<String, Object>> authList =
                (currentAuthList == null) ? new LinkedList<Map<String, Object>>() : currentAuthList;
        Set<String> newAuthSet = new HashSet<String>(newEdOrgList);
        if (add) {
            // Only append edorgs that are not already authorized.
            Set<String> toAdd = new HashSet<String>(newAuthSet);
            toAdd.removeAll(getSetOfAuthorizedIds(authList));
            for (String newAuthItem : toAdd) {
                Map<String, Object> newAuthItemProps = new HashMap<String, Object>();
                newAuthItemProps.put("authorizedEdorg", newAuthItem);
                enrichAuthorizedEdOrg(newAuthItemProps);
                authList.add(newAuthItemProps);
            }
        } else {
            // Remove every entry named in the revoke request, in place.
            Iterator<Map<String, Object>> it = authList.iterator();
            while (it.hasNext()) {
                String authorizedEdorg = (String) it.next().get("authorizedEdorg");
                if (newAuthSet.contains(authorizedEdorg)) {
                    it.remove();
                }
            }
        }
        return authList;
    }

    /**
     * Builds enriched {authorizedEdorg, lastAuthorizing*} entries for each edorg id.
     */
    public static List<Map<String, Object>> enrichAuthorizedEdOrgsList(List<String> edOrgIds) {
        List<Map<String, Object>> enrichedAEOList = new LinkedList<Map<String, Object>>();
        for (String edOrgId : edOrgIds) {
            Map<String, Object> enrichedAEO = new HashMap<String, Object>();
            enrichedAEO.put("authorizedEdorg", edOrgId);
            enrichedAEOList.add(enrichAuthorizedEdOrg(enrichedAEO));
        }
        return enrichedAEOList;
    }

    /**
     * Stamps an authorization entry with the current principal's realm edorg,
     * external user id, and the current time (epoch millis as a string).
     */
    private static Map<String, Object> enrichAuthorizedEdOrg(Map<String, Object> authInfo) {
        SLIPrincipal principal = (SLIPrincipal) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
        String user = principal.getExternalId();
        String time = String.valueOf(new Date().getTime());
        String lastAuthorizingRealmEdorg = principal.getRealmEdOrg();
        String lastAuthorizingUser = user;
        String lastAuthorizedDate = time;
        authInfo.put("lastAuthorizingRealmEdorg", lastAuthorizingRealmEdorg);
        authInfo.put("lastAuthorizingUser", lastAuthorizingUser);
        authInfo.put("lastAuthorizedDate", lastAuthorizedDate);
        return authInfo;
    }

    // Package-private helpers over EdOrgHelper; kept for test access.
    List<String> getParentEdorgs(String rootEdorg) {
        return helper.getParentEdOrgs(helper.byId(rootEdorg));
    }

    Set<String> getChildEdorgs(String rootEdorg) {
        return helper.getChildEdOrgs(Arrays.asList(rootEdorg));
    }

    /**
     * Lists the authorization status of every relevant application for the edorg:
     * all apps with an appauth entry naming the edorg (authorized=true), plus
     * apps approved for this edorg or for all edorgs that have not yet been
     * authorized (authorized=false).
     */
    @GET
    @RightsAllowed({Right.EDORG_APP_AUTHZ, Right.EDORG_DELEGATE })
    public Response getAuthorizations(@QueryParam("edorg") String edorg) {
        String myEdorg = validateEdOrg(edorg);

        // Index every registered application by id; entries are removed below
        // as they are reported authorized, leaving the "not yet authorized" set.
        Iterable<Entity> appQuery = repo.findAll("application", new NeutralQuery());
        Map<String, Entity> allApps = new HashMap<String, Entity>();
        for (Entity ent : appQuery) {
            allApps.put(ent.getEntityId(), ent);
        }

        Iterable<EntityBody> ents = service.list(new NeutralQuery(new NeutralCriteria("edorgs.authorizedEdorg", "=", myEdorg)));
        List<Map<String, Object>> results = new ArrayList<Map<String, Object>>();
        for (EntityBody body : ents) {
            HashMap<String, Object> entity = new HashMap<String, Object>();
            String appId = (String) body.get("applicationId");
            entity.put("id", appId);
            entity.put("appId", appId);
            entity.put("authorized", true);
            results.add(entity);
            allApps.remove(appId);
        }

        // Remaining apps are unauthorized; report only those this edorg could authorize.
        for (Map.Entry<String, Entity> entry : allApps.entrySet()) {
            Boolean autoApprove = (Boolean) entry.getValue().getBody().get("allowed_for_all_edorgs");
            List<String> approvedEdorgs = (List<String>) entry.getValue().getBody().get("authorized_ed_orgs");
            if ((autoApprove != null && autoApprove) || (approvedEdorgs != null && approvedEdorgs.contains(myEdorg))) {
                HashMap<String, Object> entity = new HashMap<String, Object>();
                entity.put("id", entry.getKey());
                entity.put("appId", entry.getKey());
                entity.put("authorized", false);
                results.add(entity);
            }
        }
        return Response.status(Status.OK).entity(results).build();
    }

    /**
     * Audits the delta between the old and new edorg authorization sets,
     * emitting one security event for grants and one for revocations.
     */
    private void logSecurityEvent(String appId, Collection<String> oldEdOrgs, Collection<String> newEdOrgs) {
        Set<String> oldEO = (oldEdOrgs == null) ? Collections.<String>emptySet() : new HashSet<String>(oldEdOrgs);
        Set<String> newEO = (newEdOrgs == null) ? Collections.<String>emptySet() : new HashSet<String>(newEdOrgs);
        info("EdOrgs that App could access earlier " + helper.getEdOrgStateOrganizationIds(oldEO));
        info("EdOrgs that App can access now " + helper.getEdOrgStateOrganizationIds(newEO));
        URI path = (uri != null) ? uri.getRequestUri() : null;
        String resourceClassName = ApplicationAuthorizationResource.class.getName();

        Set<String> granted = Sets.difference(newEO, oldEO);
        if (!granted.isEmpty()) {
            SecurityEvent event = securityEventBuilder.createSecurityEvent(resourceClassName, path,
                    "Application granted access to EdOrg data!", true);
            event.setAppId(appId);
            Set<String> targetEdOrgList = helper.getEdOrgStateOrganizationIds(granted);
            event.setTargetEdOrgList(new ArrayList<String>(targetEdOrgList));
            event.setTargetEdOrg("");
            audit(event);
        }

        Set<String> revoked = Sets.difference(oldEO, newEO);
        if (!revoked.isEmpty()) {
            SecurityEvent event = securityEventBuilder.createSecurityEvent(resourceClassName, path,
                    "EdOrg data access has been revoked!", true);
            event.setAppId(appId);
            Set<String> targetEdOrgList = helper.getEdOrgStateOrganizationIds(revoked);
            event.setTargetEdOrgList(new ArrayList<String>(targetEdOrgList));
            event.setTargetEdOrg("");
            audit(event);
        }
    }

    /**
     * Resolves the effective edorg for the request: the explicit query param
     * if given, otherwise the caller's own edorg.
     */
    private String validateEdOrg(String edorg) {
        if (edorg == null) {
            return SecurityUtil.getEdOrgId();
        }
        // US5894 removed the need for LEA to delegate app approval to SEA
        /*
        if (!edorg.equals(SecurityUtil.getEdOrgId())
                && !delegation.getAppApprovalDelegateEdOrgs().contains(edorg) ) {
            Set<String> edOrgIds = new HashSet<String>();
            edOrgIds.add(edorg);
            throw new APIAccessDeniedException("Cannot perform authorizations for edorg ", edOrgIds);
        }
        */
        return edorg;
    }
}
sli/api/src/main/java/org/slc/sli/api/resources/security/ApplicationAuthorizationResource.java
/*
 * Copyright 2012-2013 inBloom, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.slc.sli.api.resources.security;

import java.net.URI;
import java.util.*;

import javax.annotation.PostConstruct;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;

import com.google.common.collect.Sets;
import org.slc.sli.api.security.SLIPrincipal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Scope;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;

import org.slc.sli.api.config.EntityDefinition;
import org.slc.sli.api.config.EntityDefinitionStore;
import org.slc.sli.api.representation.EntityBody;
import org.slc.sli.api.resources.v1.HypermediaType;
import org.slc.sli.api.security.RightsAllowed;
import org.slc.sli.api.security.SecurityEventBuilder;
import org.slc.sli.api.security.context.resolver.EdOrgHelper;
import org.slc.sli.api.service.EntityService;
import org.slc.sli.api.util.SecurityUtil;
import org.slc.sli.common.util.logging.SecurityEvent;
import org.slc.sli.domain.Entity;
import org.slc.sli.domain.NeutralCriteria;
import org.slc.sli.domain.NeutralQuery;
import org.slc.sli.domain.Repository;
import org.slc.sli.domain.enums.Right;

/**
 *
 * App auths are stored in mongo in the format
 *
 * {
 *   applicationId: id of application from application collection,
 *   edorgs: ids of all the edorgs (schools, LEAs, and SEAs) that have authorized the application.
 * }
 *
 * The endpoint supports three operations
 *
 * GET /applicationAuthorization
 * GET /applicationAuthorization/id
 * PUT /applicationAuthorization/id
 *
 * On a GET, it returns data of the format
 * {
 *   appId: id of the application
 *   authorized: true|false
 * }
 *
 * For LEA administrators the content is based on the user's LEA.
 * For SEA administrators the content is based on delegated SEAs.
 *
 * If an SEA administrator needs to distinguish between two edorgs, a
 * ?edorgs=... query parameter can be used on all operations.
 *
 * On a PUT, the endpoint automatically registers parent and child edorgs.
 */
@Component
@Scope("request")
@Path("/applicationAuthorization")
@Produces({ HypermediaType.JSON + ";charset=utf-8" })
public class ApplicationAuthorizationResource {

    @Autowired
    private EntityDefinitionStore store;

    @Autowired
    @Qualifier("validationRepo")
    Repository<Entity> repo;

    @Autowired
    private EdOrgHelper helper;

    @Autowired
    private DelegationUtil delegation;

    // Resolved in init(); backs all appauth reads and writes.
    private EntityService service;

    @Autowired
    private SecurityEventBuilder securityEventBuilder;

    @Context
    UriInfo uri;

    public static final String RESOURCE_NAME = "applicationAuthorization";
    public static final String APP_ID = "applicationId";
    public static final String EDORG_IDS = "edorgs";

    // Cache the EntityService for the applicationAuthorization resource.
    @PostConstruct
    public void init() {
        EntityDefinition def = store.lookupByResourceName(RESOURCE_NAME);
        service = def.getService();
    }

    // GET /applicationAuthorization/{appId}: report whether the given app is
    // authorized for the effective edorg. 404 if the app does not exist at all.
    @GET
    @Path("{appId}")
    @RightsAllowed({Right.EDORG_APP_AUTHZ, Right.EDORG_DELEGATE })
    public Response getAuthorization(@PathParam("appId") String appId, @QueryParam("edorg") String edorg) {
        String myEdorg = validateEdOrg(edorg);
        EntityBody appAuth = getAppAuth(appId);
        if (appAuth == null) {
            //See if this is an actual app
            Entity appEntity = repo.findOne("application", new NeutralQuery(new NeutralCriteria("_id", "=", appId)));
            if (appEntity == null) {
                return Response.status(Status.NOT_FOUND).build();
            } else {
                // App exists but has no appauth record: report unauthorized.
                HashMap<String, Object> entity = new HashMap<String, Object>();
                entity.put("id", appId);
                entity.put("appId", appId);
                entity.put("authorized", false);
                entity.put("edorgs", Collections.emptyList());//(TA10857)
                return Response.status(Status.OK).entity(entity).build();
            }
        } else {
            HashMap<String, Object> entity = new HashMap<String, Object>();
            entity.put("appId", appId);
            entity.put("id", appId);
            List<Map<String,Object>> edOrgs = (List<Map<String,Object>>) appAuth.get("edorgs");
            entity.put("authorized", containsEdOrg(edOrgs, myEdorg));
            entity.put("edorgs", edOrgs);//(TA10857)
            return Response.status(Status.OK).entity(entity).build();
        }
    }

    // True if any stored entry's "authorizedEdorg" equals edOrg; null-safe.
    private boolean containsEdOrg(List<Map<String,Object>> edOrgList, String edOrg) {
        if( edOrgList == null || edOrg == null ) {
            return false;
        }
        for (Map<String,Object> edOrgListElement :edOrgList ){
            String authorizedEdorg = (String)edOrgListElement.get("authorizedEdorg");
            if(authorizedEdorg != null){
                if(edOrg.equals(authorizedEdorg)){
                    return true;
                }
            }
        }
        return false;
    }

    // Fetch the (at most one) appauth record for this application, or null.
    private EntityBody getAppAuth(String appId) {
        Iterable<EntityBody> appAuths = service.list(new NeutralQuery(new NeutralCriteria("applicationId", "=", appId)));
        for (EntityBody auth : appAuths) {
            return auth;
        }
        return null;
    }

    // PUT /applicationAuthorization/{appId}: grant (authorized=true) or revoke
    // (authorized=false) the edorgs listed in the request body.
    // NOTE(review): the else-branch re-reads auth.get("edorgs") into newAuth
    // instead of using the null-normalized edOrgsToAuthorize — a missing
    // "edorgs" key can NPE inside modifyEdOrgList; verify against callers.
    @PUT
    @Path("{appId}")
    @RightsAllowed({Right.EDORG_APP_AUTHZ, Right.EDORG_DELEGATE })
    public Response updateAuthorization(@PathParam("appId") String appId, EntityBody auth) {
        if (!auth.containsKey("authorized")) {
            return Response.status(Status.BAD_REQUEST).build();
        }

        List<String> edOrgsToAuthorize = (List<String>) auth.get("edorgs");//(TA10857)
        if( edOrgsToAuthorize == null) {
            edOrgsToAuthorize = Collections.emptyList();
        }

        EntityBody existingAuth = getAppAuth(appId);
        if (existingAuth == null) {
            //See if this is an actual app
            Entity appEntity = repo.findOne("application", new NeutralQuery(new NeutralCriteria("_id", "=", appId)));
            if (appEntity == null) {
                return Response.status(Status.NOT_FOUND).build();
            } else {
                if (((Boolean) auth.get("authorized")).booleanValue()) {
                    //being set to true. if false, there's no work to be done
                    //We don't have an appauth entry for this app, so create one
                    EntityBody body = new EntityBody();
                    body.put("applicationId", appId);
                    body.put("edorgs", enrichAuthorizedEdOrgsList(edOrgsToAuthorize));
                    service.create(body);
                    logSecurityEvent(appId, null, edOrgsToAuthorize);
                }
                return Response.status(Status.NO_CONTENT).build();
            }
        } else {
            List<Map<String,Object>> oldEdOrgs = (List<Map<String,Object>>)existingAuth.get("edorgs");
            Set<String> oldAuth = getSetOfAuthorizedIds(oldEdOrgs);
            List<String> newAuth = (List<String>)(auth.get("edorgs"));
            logSecurityEvent(appId, oldAuth, newAuth);
            boolean addOrRemove = ((Boolean) auth.get("authorized")).booleanValue();
            List<Map<String,Object>> modifiedAuthList = modifyEdOrgList(oldEdOrgs, addOrRemove, newAuth) ;
            existingAuth.put("edorgs", modifiedAuthList);
            service.update((String) existingAuth.get("id"), existingAuth);
            return Response.status(Status.NO_CONTENT).build();
        }
    }

    // Extract the set of "authorizedEdorg" ids from a stored edorg list.
    // NOTE(review): NPEs on a null list — callers must pass non-null.
    private Set<String> getSetOfAuthorizedIds( List<Map<String,Object>> currentAuthList) {
        Set<String> authSet = new HashSet<String>();
        for(Map<String, Object> currentAuthListItem:currentAuthList) {
            String authorizedEdorg = (String)currentAuthListItem.get("authorizedEdorg");
            authSet.add(authorizedEdorg);
        }
        return authSet;
    }

    // Add enriched entries for edorgs not yet present (add=true), or remove
    // every entry named in newEdOrgList (add=false), mutating in place.
    private List<Map<String,Object>> modifyEdOrgList( List<Map<String,Object>> currentAuthList, boolean add, List<String> newEdOrgList ) {
        if(currentAuthList == null) {
            currentAuthList = new LinkedList<Map<String, Object>>();
        }
        Set<String> newAuthSet = new HashSet<String>(newEdOrgList);
        Set<String> oldAuthSet = getSetOfAuthorizedIds(currentAuthList);
        if(add) {
            // Only edorgs not already authorized are appended.
            newAuthSet.removeAll(oldAuthSet);
            for(String newAuthItem :newAuthSet) {
                Map<String, Object> newAuthItemProps = new HashMap<String, Object>();
                newAuthItemProps.put("authorizedEdorg", newAuthItem);
                enrichAuthorizedEdOrg(newAuthItemProps);
                currentAuthList.add(newAuthItemProps);
            }
        } else {
            ListIterator<Map<String, Object>> it = currentAuthList.listIterator();
            while(it.hasNext()) {
                Map<String, Object> currentAuthListItem = it.next();
                String authorizedEdorg = (String)currentAuthListItem.get("authorizedEdorg");
                if(newAuthSet.contains(authorizedEdorg)) {
                    it.remove();
                }
            }
        }
        return currentAuthList;
    }

    // Build enriched {authorizedEdorg, lastAuthorizing*} entries per edorg id.
    public static List<Map<String, Object>> enrichAuthorizedEdOrgsList(List<String> edOrgIds) {
        List<Map<String, Object>> enrichedAEOList = new LinkedList<Map<String, Object>>();
        for(String edOrgId:edOrgIds) {
            Map<String, Object> enrichedAEO = new HashMap<String, Object>();
            enrichedAEO.put("authorizedEdorg", edOrgId);
            enrichedAEOList.add(enrichAuthorizedEdOrg(enrichedAEO));
        }
        return enrichedAEOList;
    }

    // Stamp an entry with the current principal's realm edorg, external user
    // id, and the current time (epoch millis as a string).
    private static Map<String, Object> enrichAuthorizedEdOrg(Map<String, Object> authInfo) {
        SLIPrincipal principal = (SLIPrincipal) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
        String user = principal.getExternalId();
        String time = String.valueOf(new Date().getTime());
        String lastAuthorizingRealmEdorg = principal.getRealmEdOrg();
        String lastAuthorizingUser = user;
        String lastAuthorizedDate = time;
        authInfo.put("lastAuthorizingRealmEdorg", lastAuthorizingRealmEdorg);
        authInfo.put("lastAuthorizingUser", lastAuthorizingUser);
        authInfo.put("lastAuthorizedDate", lastAuthorizedDate);
        return authInfo;
    }

    // Package-private helpers over EdOrgHelper.
    List<String> getParentEdorgs(String rootEdorg) {
        return helper.getParentEdOrgs(helper.byId(rootEdorg));
    }

    Set<String> getChildEdorgs(String rootEdorg) {
        return helper.getChildEdOrgs(Arrays.asList(rootEdorg));
    }

    // GET /applicationAuthorization: list authorized apps for the edorg plus
    // approvable-but-unauthorized apps (approved for all edorgs or this one).
    @GET
    @RightsAllowed({Right.EDORG_APP_AUTHZ, Right.EDORG_DELEGATE })
    public Response getAuthorizations(@QueryParam("edorg") String edorg) {
        String myEdorg = validateEdOrg(edorg);
        Iterable<Entity> appQuery = repo.findAll("application", new NeutralQuery());
        Map<String, Entity> allApps = new HashMap<String, Entity>();
        for (Entity ent : appQuery) {
            allApps.put(ent.getEntityId(), ent);
        }

        Iterable<EntityBody> ents = service.list(new NeutralQuery(new NeutralCriteria("edorgs.authorizedEdorg", "=", myEdorg)));
        List<Map> results = new ArrayList<Map>();
        for (EntityBody body : ents) {
            HashMap<String, Object> entity = new HashMap<String, Object>();
            String appId = (String) body.get("applicationId");
            entity.put("id", appId);
            entity.put("appId", appId);
            entity.put("authorized", true);
            results.add(entity);
            // Remove reported apps so only unauthorized ones remain below.
            allApps.remove(appId);
        }

        for (Map.Entry<String, Entity> entry : allApps.entrySet()) {
            Boolean autoApprove = (Boolean) entry.getValue().getBody().get("allowed_for_all_edorgs");
            List<String> approvedEdorgs = (List<String>) entry.getValue().getBody().get("authorized_ed_orgs");
            if ((autoApprove != null && autoApprove) || (approvedEdorgs != null && approvedEdorgs.contains(myEdorg))) {
                HashMap<String, Object> entity = new HashMap<String, Object>();
                entity.put("id", entry.getKey());
                entity.put("appId", entry.getKey());
                entity.put("authorized", false);
                results.add(entity);
            }
        }
        return Response.status(Status.OK).entity(results).build();
    }

    // Audit grant/revoke deltas between the old and new edorg sets as
    // security events; also logs both sets at info level.
    private void logSecurityEvent(String appId, Collection<String> oldEdOrgs, Collection<String> newEdOrgs) {
        Set<String> oldEO = (oldEdOrgs == null)?Collections.<String>emptySet():new HashSet<String>(oldEdOrgs);
        Set<String> newEO = (newEdOrgs == null)?Collections.<String>emptySet():new HashSet<String>(newEdOrgs);
        info("EdOrgs that App could access earlier " + helper.getEdOrgStateOrganizationIds(oldEO));
        info("EdOrgs that App can access now " + helper.getEdOrgStateOrganizationIds(newEO));
        URI path = (uri != null)?uri.getRequestUri():null;
        String resourceClassName = ApplicationAuthorizationResource.class.getName();

        Set<String> granted = Sets.difference(newEO, oldEO);
        if(granted.size() > 0) {
            SecurityEvent event = securityEventBuilder.createSecurityEvent(resourceClassName, path, "Application granted access to EdOrg data!", true);
            event.setAppId(appId);
            Set<String> targetEdOrgList = helper.getEdOrgStateOrganizationIds(granted);
            event.setTargetEdOrgList(new ArrayList<String>(targetEdOrgList));
            event.setTargetEdOrg("");
            audit(event);
        }

        Set<String> revoked = Sets.difference(oldEO, newEO);
        if(revoked.size() > 0) {
            SecurityEvent event = securityEventBuilder.createSecurityEvent(resourceClassName, path, "EdOrg data access has been revoked!", true);
            event.setAppId(appId);
            Set<String> targetEdOrgList = helper.getEdOrgStateOrganizationIds(revoked);
            event.setTargetEdOrgList(new ArrayList<String>(targetEdOrgList));
            event.setTargetEdOrg("");
            audit(event);
        }
    }

    // Resolve the effective edorg: explicit query param, else caller's edorg.
    private String validateEdOrg(String edorg) {
        if (edorg == null) {
            return SecurityUtil.getEdOrgId();
        }

        // US5894 removed the need for LEA to delegate app approval to SEA
        /*
        if (!edorg.equals(SecurityUtil.getEdOrgId())
                && !delegation.getAppApprovalDelegateEdOrgs().contains(edorg) ) {
            Set<String> edOrgIds = new HashSet<String>();
            edOrgIds.add(edorg);
            throw new APIAccessDeniedException("Cannot perform authorizations for edorg ", edOrgIds);
        }
        */
        return edorg;
    }
}
US5860 Handle applicationAuthorization schema change 5/n
sli/api/src/main/java/org/slc/sli/api/resources/security/ApplicationAuthorizationResource.java
US5860 Handle applicationAuthorization schema change 5/n
Java
apache-2.0
faa27cc14581706e56ea196d03e345908b650354
0
OmniLayer/OmniJ,OmniLayer/OmniJ,OmniLayer/OmniJ
package foundation.omni.rest.omniwallet.json;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import foundation.omni.CurrencyID;
import foundation.omni.OmniValue;
// NOTE(review): OmniPropertyInfo appears unused in this file — confirm before removing.
import foundation.omni.json.pojo.OmniPropertyInfo;
import org.bitcoinj.core.Address;
import org.bitcoinj.core.Sha256Hash;

import java.math.BigDecimal;
import java.util.List;

/**
 * Java POJO for per-property record in /v1/properties/list response
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class OmniwalletPropertyInfo {
    // Satoshi's address that received the block reward for Block 0.
    // Used as a placeholder issuer for the BTC property (see mapIssuer).
    private static Address bitcoinIssuerAddress = Address.fromString(null, "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa");

    private final long blocktime;
    private final String category;
    private final Sha256Hash creationTxId;
    private final String data;
    private final boolean divisible;
    private final boolean fixedIssuance;
    private final boolean freezingEnabled;
    private final Address issuer;
    private final List<Object> issuances;
    private final boolean managedIssuance;
    private final String name;
    private final CurrencyID propertyid;
    private final String subcategory;
    private final OmniValue totalTokens;
    private final String url;

    /**
     * Jackson creator: maps one JSON property record into a strongly-typed POJO.
     * "issuer" is converted via mapIssuer and "totaltokens" is parsed into an
     * OmniValue using the record's own divisibility flag.
     *
     * NOTE(review): no getters exist for freezingEnabled/issuances — confirm
     * whether that is intentional.
     */
    public OmniwalletPropertyInfo(@JsonProperty("blocktime") long blocktime,
                                  @JsonProperty("category") String category,
                                  @JsonProperty("creationtxid") Sha256Hash creationTxId,
                                  @JsonProperty("data") String data,
                                  @JsonProperty("divisible") boolean divisible,
                                  @JsonProperty("fixedissuance") boolean fixedIssuance,
                                  @JsonProperty("freezingenabled") boolean freezingEnabled,
                                  @JsonProperty("issuer") String issuerString,
                                  @JsonProperty("issuances") List<Object> issuances,
                                  @JsonProperty("managedissuance") boolean managedIssuance,
                                  @JsonProperty("name") String name,
                                  @JsonProperty("propertyid") CurrencyID propertyid,
                                  @JsonProperty("subcategory") String subcategory,
                                  @JsonProperty("totaltokens") String totalTokensString,
                                  @JsonProperty("url") String url) {
        this.blocktime = blocktime;
        this.category = category;
        this.creationTxId = creationTxId;
        this.data = data;
        this.divisible = divisible;
        this.fixedIssuance = fixedIssuance;
        this.freezingEnabled = freezingEnabled;
        this.issuer = mapIssuer(issuerString);
        this.issuances = issuances;
        this.managedIssuance = managedIssuance;
        this.name = name;
        this.propertyid = propertyid;
        this.subcategory = subcategory;
        this.totalTokens = OmniValue.of(new BigDecimal(totalTokensString), divisible);
        this.url = url;
    }

    public long getBlocktime() {
        return blocktime;
    }

    public String getCategory() {
        return category;
    }

    public Sha256Hash getCreationTxId() {
        return creationTxId;
    }

    public String getData() {
        return data;
    }

    public boolean isDivisible() {
        return divisible;
    }

    public boolean isFixedIssuance() {
        return fixedIssuance;
    }

    public Address getIssuer() {
        return issuer;
    }

    public boolean isManagedIssuance() {
        return managedIssuance;
    }

    public String getName() {
        return name;
    }

    public CurrencyID getPropertyid() {
        return propertyid;
    }

    public String getSubcategory() {
        return subcategory;
    }

    public OmniValue getTotalTokens() {
        return totalTokens;
    }

    public String getUrl() {
        return url;
    }

    /**
     * Map "issuer" from String to Address.
     * Omniwallet doesn't return a valid "issuer" {@code Address} for the BTC property, instead it returns
     * "Satoshi Nakamoto". To return a strongly typed {@code Address}, we'll return the address
     * that received the block reward for the genesis block.
     *
     * @param issuerString The "issuer" JSON value returned by Omniwallet
     * @return The issuer converted to an {@code Address}
     */
    private static Address mapIssuer(String issuerString) {
        return issuerString.equals("Satoshi Nakamoto") ? bitcoinIssuerAddress : Address.fromString(null, issuerString);
    }
}
/* Sample /v1/properties/list response this POJO is bound against:
{
  "properties": [
    {
      "blocktime": 1377994675,
      "category": "N/A",
      "creationtxid": "0000000000000000000000000000000000000000000000000000000000000000",
      "data": "Omni serve as the binding between Bitcoin, smart properties and contracts created on the Omni Layer.",
      "divisible": true,
      "fixedissuance": false,
      "issuer": "1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P",
      "managedissuance": false,
      "name": "Omni",
      "propertyid": 1,
      "subcategory": "N/A",
      "totaltokens": "617211.68177584",
      "url": "http://www.omnilayer.org"
    },
    {
      "blocktime": 1377994675,
      "category": "N/A",
      "creationtxid": "0000000000000000000000000000000000000000000000000000000000000000",
      "data": "Test Omni serve as the binding between Bitcoin, smart properties and contracts created on the Omni Layer.",
      "divisible": true,
      "fixedissuance": false,
      "issuer": "1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P",
      "managedissuance": false,
      "name": "Test Omni",
      "propertyid": 2,
      "subcategory": "N/A",
      "totaltokens": "563162.35759628",
      "url": "http://www.omnilayer.org"
    },
    { "additional records": "..." }
  ],
  "status": "OK"
}
*/
omnij-rest-client/src/main/java/foundation/omni/rest/omniwallet/json/OmniwalletPropertyInfo.java
package foundation.omni.rest.omniwallet.json; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import foundation.omni.CurrencyID; import foundation.omni.OmniValue; import foundation.omni.json.pojo.OmniPropertyInfo; import org.bitcoinj.core.Address; import org.bitcoinj.core.Sha256Hash; import java.math.BigDecimal; import java.util.List; /** * Java POJO for per-property record in /v1/properties/list response */ @JsonIgnoreProperties(ignoreUnknown = true) public class OmniwalletPropertyInfo { private final long blocktime; private final String category; private final Sha256Hash creationTxId; private final String data; private final boolean divisible; private final boolean fixedIssuance; private final boolean freezingEnabled; private final Address issuer; private final List<Object> issuances; private final boolean managedIssuance; private final String name; private final CurrencyID propertyid; private final String subcategory; private final OmniValue totalTokens; private final String url; public OmniwalletPropertyInfo(@JsonProperty("blocktime") long blocktime, @JsonProperty("category") String category, @JsonProperty("creationtxid") Sha256Hash creationTxId, @JsonProperty("data") String data, @JsonProperty("divisible") boolean divisible, @JsonProperty("fixedissuance") boolean fixedIssuance, @JsonProperty("freezingenabled") boolean freezingEnabled, @JsonProperty("issuer") String issuerString, @JsonProperty("issuances") List<Object> issuances, @JsonProperty("managedissuance") boolean managedIssuance, @JsonProperty("name") String name, @JsonProperty("propertyid") CurrencyID propertyid, @JsonProperty("subcategory") String subcategory, @JsonProperty("totaltokens") String totalTokensString, @JsonProperty("url") String url) { this.blocktime = blocktime; this.category = category; this.creationTxId = creationTxId; this.data = data; this.divisible = divisible; this.fixedIssuance = fixedIssuance; this.freezingEnabled 
= freezingEnabled; this.issuer = mapIssuer(issuerString); this.issuances = issuances; this.managedIssuance = managedIssuance; this.name = name; this.propertyid = propertyid; this.subcategory = subcategory; this.totalTokens = OmniValue.of(new BigDecimal(totalTokensString), divisible); this.url = url; } public long getBlocktime() { return blocktime; } public String getCategory() { return category; } public Sha256Hash getCreationTxId() { return creationTxId; } public String getData() { return data; } public boolean isDivisible() { return divisible; } public boolean isFixedIssuance() { return fixedIssuance; } public Address getIssuer() { return issuer; } public boolean isManagedIssuance() { return managedIssuance; } public String getName() { return name; } public CurrencyID getPropertyid() { return propertyid; } public String getSubcategory() { return subcategory; } public OmniValue getTotalTokens() { return totalTokens; } public String getUrl() { return url; } private static Address mapIssuer(String issuerString) { return issuerString.equals("Satoshi Nakamoto") ? 
OmniPropertyInfo.defaultIssuerAddress : Address.fromString(null, issuerString); } } /* { "properties": [ { "blocktime": 1377994675, "category": "N/A", "creationtxid": "0000000000000000000000000000000000000000000000000000000000000000", "data": "Omni serve as the binding between Bitcoin, smart properties and contracts created on the Omni Layer.", "divisible": true, "fixedissuance": false, "issuer": "1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P", "managedissuance": false, "name": "Omni", "propertyid": 1, "subcategory": "N/A", "totaltokens": "617211.68177584", "url": "http://www.omnilayer.org" }, { "blocktime": 1377994675, "category": "N/A", "creationtxid": "0000000000000000000000000000000000000000000000000000000000000000", "data": "Test Omni serve as the binding between Bitcoin, smart properties and contracts created on the Omni Layer.", "divisible": true, "fixedissuance": false, "issuer": "1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P", "managedissuance": false, "name": "Test Omni", "propertyid": 2, "subcategory": "N/A", "totaltokens": "563162.35759628", "url": "http://www.omnilayer.org" }, { "additional records": "..." } ], "status": "OK" } */
OmniwalletPropertyInfo.bitcoinIssuerAddress Use the coinbase address from the Genesis Block as the Omni property issuer Address for BTC.
omnij-rest-client/src/main/java/foundation/omni/rest/omniwallet/json/OmniwalletPropertyInfo.java
OmniwalletPropertyInfo.bitcoinIssuerAddress
Java
apache-2.0
44309fed91c130a486ef329b31b9522095ae8dd8
0
lemire/incubator-kylin,haoch/kylin,haoch/kylin,murkrishn/incubator-kylin,haoch/kylin,lemire/incubator-kylin,murkrishn/incubator-kylin,haoch/kylin,haoch/kylin,lemire/incubator-kylin,murkrishn/incubator-kylin,murkrishn/incubator-kylin,lemire/incubator-kylin,haoch/kylin
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.job.hadoop.invertedindex; import org.apache.commons.cli.Options; import org.apache.hadoop.util.ToolRunner; import org.apache.kylin.common.KylinConfig; import org.apache.kylin.invertedindex.IIDescManager; import org.apache.kylin.invertedindex.IIInstance; import org.apache.kylin.invertedindex.IIManager; import org.apache.kylin.invertedindex.model.IIDesc; import org.apache.kylin.job.JobInstance; import org.apache.kylin.job.JoinedFlatTable; import org.apache.kylin.job.cmd.ICommandOutput; import org.apache.kylin.job.cmd.ShellCmd; import org.apache.kylin.job.engine.JobEngineConfig; import org.apache.kylin.job.hadoop.AbstractHadoopJob; import org.apache.kylin.job.hadoop.hive.IIJoinedFlatTableDesc; import org.apache.kylin.job.hadoop.hive.IJoinedFlatTableDesc; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by Hongbin Ma(Binmahone) on 12/30/14. 
*/ public class IIFlattenHiveJob extends AbstractHadoopJob { protected static final Logger log = LoggerFactory.getLogger(InvertedIndexJob.class); @Override public int run(String[] args) throws Exception { Options options = new Options(); try { options.addOption(OPTION_II_NAME); parseOptions(options, args); String iiname = getOptionValue(OPTION_II_NAME); KylinConfig config = KylinConfig.getInstanceFromEnv(); IIInstance iiInstance = IIManager.getInstance(config).getII(iiname); IIDesc iidesc = IIDescManager.getInstance(config).getIIDesc(iiInstance.getDescName()); String jobUUID = "00bf87b5-c7b5-4420-a12a-07f6b37b3187"; JobEngineConfig engineConfig = new JobEngineConfig(config); IJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(iidesc); final String useDatabaseHql = "USE " + engineConfig.getConfig().getHiveDatabaseForIntermediateTable() + ";"; String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, // JobInstance.getJobWorkingDir(jobUUID, engineConfig.getHdfsWorkingDirectory()), jobUUID); String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig); StringBuffer buf = new StringBuffer(); buf.append("hive -e \""); buf.append(useDatabaseHql + "\n"); buf.append(dropTableHql + "\n"); buf.append(createTableHql + "\n"); buf.append(insertDataHqls + "\n"); buf.append("\""); System.out.println(buf.toString()); System.out.println("========================"); ShellCmd cmd = new ShellCmd(buf.toString(), null, null, null, false); ICommandOutput output = cmd.execute(); System.out.println(output.getOutput()); System.out.println(output.getExitCode()); return 0; } catch (Exception e) { printUsage(options); throw e; } } public static void main(String[] args) throws Exception { IIFlattenHiveJob job = new IIFlattenHiveJob(); int exitCode = ToolRunner.run(job, args); 
System.exit(exitCode); } }
job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.job.hadoop.invertedindex; import org.apache.commons.cli.Options; import org.apache.hadoop.util.ToolRunner; import org.apache.kylin.common.KylinConfig; import org.apache.kylin.invertedindex.IIDescManager; import org.apache.kylin.invertedindex.IIInstance; import org.apache.kylin.invertedindex.IIManager; import org.apache.kylin.invertedindex.model.IIDesc; import org.apache.kylin.job.JobInstance; import org.apache.kylin.job.JoinedFlatTable; import org.apache.kylin.job.cmd.ICommandOutput; import org.apache.kylin.job.cmd.ShellCmd; import org.apache.kylin.job.engine.JobEngineConfig; import org.apache.kylin.job.hadoop.AbstractHadoopJob; import org.apache.kylin.job.hadoop.hive.IIJoinedFlatTableDesc; import org.apache.kylin.job.hadoop.hive.IJoinedFlatTableDesc; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by Hongbin Ma(Binmahone) on 12/30/14. 
*/ public class IIFlattenHiveJob extends AbstractHadoopJob { protected static final Logger log = LoggerFactory.getLogger(InvertedIndexJob.class); @Override public int run(String[] args) throws Exception { Options options = new Options(); try { options.addOption(OPTION_II_NAME); parseOptions(options, args); String iiname = getOptionValue(OPTION_II_NAME); KylinConfig config = KylinConfig.getInstanceFromEnv(); IIInstance iiInstance = IIManager.getInstance(config).getII(iiname); IIDesc iidesc = IIDescManager.getInstance(config).getIIDesc(iiInstance.getDescName()); String jobUUID = "00bf87b5-c7b5-4420-a12a-07f6b37b3187"; JobEngineConfig engineConfig = new JobEngineConfig(config); IJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(iidesc); String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, // JobInstance.getJobWorkingDir(jobUUID, engineConfig.getHdfsWorkingDirectory()), jobUUID); String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig); StringBuffer buf = new StringBuffer(); buf.append("hive -e \""); buf.append(dropTableHql + "\n"); buf.append(createTableHql + "\n"); buf.append(insertDataHqls + "\n"); buf.append("\""); System.out.println(buf.toString()); System.out.println("========================"); ShellCmd cmd = new ShellCmd(buf.toString(), null, null, null, false); ICommandOutput output = cmd.execute(); System.out.println(output.getOutput()); System.out.println(output.getExitCode()); return 0; } catch (Exception e) { printUsage(options); throw e; } } public static void main(String[] args) throws Exception { IIFlattenHiveJob job = new IIFlattenHiveJob(); int exitCode = ToolRunner.run(job, args); System.exit(exitCode); } }
KYLIN-998 allow configurable hive database in IIFlattenHiveJob
job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
KYLIN-998 allow configurable hive database in IIFlattenHiveJob
Java
apache-2.0
1c6376c89dbc3a44b9e713f5871b9a73aa192bde
0
rainerh/camunda-bpm-platform,bentrm/camunda-bpm-platform,1and1/camunda-bpm-platform,falko/camunda-bpm-platform,clintmanning/new-empty,AlexMinsk/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,clintmanning/new-empty,clintmanning/new-empty,fouasnon/camunda-bpm-platform,holisticon/camunda-bpm-platform,menski/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,langfr/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,nagyistoce/camunda-bpm-platform,Sumitdahiya/camunda,filiphr/camunda-bpm-platform,falko/camunda-bpm-platform,plexiti/camunda-bpm-platform,Sumitdahiya/camunda,hupda-edpe/c,1and1/camunda-bpm-platform,langfr/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,skjolber/camunda-bpm-platform,langfr/camunda-bpm-platform,Sumitdahiya/camunda,bentrm/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,plexiti/camunda-bpm-platform,camunda/camunda-bpm-platform,skjolber/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,nibin/camunda-bpm-platform,joansmith/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,falko/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,jangalinski/camunda-bpm-platform,hupda-edpe/c,tcrossland/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,falko/camunda-bpm-platform,rainerh/camunda-bpm-platform,holisticon/camunda-bpm-platform,menski/camunda-bpm-platform,holisticon/camunda-bpm-platform,Sumitdahiya/camunda,LuisePufahl/camunda-bpm-platform_batchProcessing,AlexMinsk/camunda-bpm-platform,tcrossland/camunda-bpm-platform,nibin/camunda-bpm-platform,xasx/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,hupda-edpe/c,LuisePufahl/camunda-bpm-platform_batchProcessing,subhrajyotim/camunda-bpm-platform,filiphr/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,hupda-edpe/c,hawky-4s-/camunda-bpm-platform,filiphr/camunda-bpm-platform,fouasnon/camun
da-bpm-platform,plexiti/camunda-bpm-platform,bentrm/camunda-bpm-platform,skjolber/camunda-bpm-platform,skjolber/camunda-bpm-platform,joansmith/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,falko/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,holisticon/camunda-bpm-platform,tcrossland/camunda-bpm-platform,plexiti/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,fouasnon/camunda-bpm-platform,camunda/camunda-bpm-platform,camunda/camunda-bpm-platform,langfr/camunda-bpm-platform,joansmith/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,fouasnon/camunda-bpm-platform,joansmith/camunda-bpm-platform,tcrossland/camunda-bpm-platform,nibin/camunda-bpm-platform,filiphr/camunda-bpm-platform,holisticon/camunda-bpm-platform,holisticon/camunda-bpm-platform,rainerh/camunda-bpm-platform,menski/camunda-bpm-platform,bentrm/camunda-bpm-platform,plexiti/camunda-bpm-platform,langfr/camunda-bpm-platform,xasx/camunda-bpm-platform,nibin/camunda-bpm-platform,Sumitdahiya/camunda,skjolber/camunda-bpm-platform,plexiti/camunda-bpm-platform,rainerh/camunda-bpm-platform,langfr/camunda-bpm-platform,xasx/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,jangalinski/camunda-bpm-platform,hupda-edpe/c,nagyistoce/camunda-bpm-platform,tcrossland/camunda-bpm-platform,bentrm/camunda-bpm-platform,jangalinski/camunda-bpm-platform,menski/camunda-bpm-platform,skjolber/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,LuisePufahl/camunda-bpm-platform_batchProcessing,camunda/camunda-bpm-platform,xasx/camunda-bpm-platform,jangalinski/camunda-bpm-platform,joansmith/camunda-bpm-platform,xasx/camunda-bpm-platform,nibin/camunda-bpm-platform,1and1/camunda-bpm-platform,1and1/camunda-bpm-platform,xasx/camunda-bpm-platform,tcrossland/camunda-bpm-platform,filiphr/camunda-bpm-platform,joansmith/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,camunda/camunda-bpm-platform,filiphr/camunda-bpm-platform,jangalinski/camunda-bpm-plat
form,hawky-4s-/camunda-bpm-platform,falko/camunda-bpm-platform,fouasnon/camunda-bpm-platform,jangalinski/camunda-bpm-platform,camunda/camunda-bpm-platform,nibin/camunda-bpm-platform,hupda-edpe/c,LuisePufahl/camunda-bpm-platform_batchProcessing,AlexMinsk/camunda-bpm-platform,fouasnon/camunda-bpm-platform,Sumitdahiya/camunda,bentrm/camunda-bpm-platform,rainerh/camunda-bpm-platform,rainerh/camunda-bpm-platform,menski/camunda-bpm-platform
package com.camunda.fox.tasklist; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.enterprise.event.Event; import javax.enterprise.event.Observes; import javax.faces.bean.ViewScoped; import javax.inject.Inject; import javax.inject.Named; import org.activiti.engine.TaskService; import com.camunda.fox.tasklist.api.TaskListGroup; import com.camunda.fox.tasklist.api.TaskListIdentity; import com.camunda.fox.tasklist.api.TaskNavigationLink; import com.camunda.fox.tasklist.api.TasklistIdentityService; import com.camunda.fox.tasklist.api.TasklistUser; import com.camunda.fox.tasklist.event.SignOutEvent; import com.camunda.fox.tasklist.event.TaskNavigationLinkSelectedEvent; @Named @ViewScoped public class TaskNavigation implements Serializable { private static final Logger log = Logger.getLogger(TaskNavigation.class.getCanonicalName()); private static final long serialVersionUID = 1L; @Inject private TaskListIdentity currentIdentity; @Inject private TaskService taskService; @Inject private TasklistIdentityService tasklistIdentityService; @Inject private Event<TaskNavigationLinkSelectedEvent> taskNavigationLinkSelectedEvent; private MyTasksLink myTasksLink; private UnassignedTasksLink unassignedTasksLink; private List<GroupTasksLink> groupTasksLinks; private List<ColleaguesTasksLink> colleaguesTasksLinks; private TaskNavigationLink selected; @PostConstruct protected void init() { log.finest("initializing " + this.getClass().getSimpleName() + " (" + this + ")"); selected = getMyTasksLink(); selected.setActive(true); } public MyTasksLink getMyTasksLink() { if (myTasksLink == null) { long personalTasksCount = taskService.createTaskQuery().taskAssignee(currentIdentity.getCurrentUser().getUsername()).count(); myTasksLink = new MyTasksLink("My Tasks (" + personalTasksCount + ")", personalTasksCount, false); } return myTasksLink; } public UnassignedTasksLink 
getUnassignedTasksLink() { if (unassignedTasksLink == null) { long unassignedTasksCount = taskService.createTaskQuery().taskCandidateUser(currentIdentity.getCurrentUser().getUsername()).count(); unassignedTasksLink = new UnassignedTasksLink("Unassigned Tasks (" + unassignedTasksCount + ")", unassignedTasksCount, false); } return unassignedTasksLink; } public List<GroupTasksLink> getGroupTasksLinks() { if (groupTasksLinks == null) { groupTasksLinks = new ArrayList<GroupTasksLink>(); List<TaskListGroup> groups = tasklistIdentityService.getGroupsByUserId(currentIdentity.getCurrentUser().getUsername()); for (TaskListGroup taskListGroup : groups) { long groupTasksCount = taskService.createTaskQuery().taskCandidateGroup(taskListGroup.getGroupId()).count(); GroupTasksLink gourpLink = new GroupTasksLink(taskListGroup.getGroupName() + " (" + groupTasksCount + ")", groupTasksCount, taskListGroup.getGroupId(), false); groupTasksLinks.add(gourpLink); } } return groupTasksLinks; } public List<ColleaguesTasksLink> getColleaguesTasksLinks() { if (colleaguesTasksLinks == null) { colleaguesTasksLinks = new ArrayList<ColleaguesTasksLink>(); List<TasklistUser> colleagues = tasklistIdentityService.getColleaguesByUserId(currentIdentity.getCurrentUser().getUsername()); for (TasklistUser colleague : colleagues) { long colleagueTasksCount = taskService.createTaskQuery().taskAssignee(colleague.getUsername()).count(); ColleaguesTasksLink colleaguesLink = new ColleaguesTasksLink(colleague.getFirstname() + " " + colleague.getLastname() + " (" + colleagueTasksCount + ")", colleagueTasksCount, colleague.getUsername(), false); colleaguesTasksLinks.add(colleaguesLink); } } return colleaguesTasksLinks; } public void selectViaEvent(@Observes TaskNavigationLinkSelectedEvent taskNavigationLinkSelectedEvent) { if (!taskNavigationLinkSelectedEvent.getLink().equals(selected)) { if (selected != null) { selected.setActive(false); } selected = taskNavigationLinkSelectedEvent.getLink(); 
selected.setActive(true); } } public void reset(@Observes SignOutEvent signOutEvent) { myTasksLink = null; unassignedTasksLink = null; groupTasksLinks = null; } public void select(TaskNavigationLink link) { log.finest("Menu entry " + link + " was selected, firing TaskNavigationLinkSelectedEvent."); taskNavigationLinkSelectedEvent.fire(new TaskNavigationLinkSelectedEvent(link)); } }
webapps/tasklist/tasklist-webapp/src/main/java/com/camunda/fox/tasklist/TaskNavigation.java
package com.camunda.fox.tasklist; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.enterprise.event.Event; import javax.enterprise.event.Observes; import javax.faces.bean.ViewScoped; import javax.inject.Inject; import javax.inject.Named; import org.activiti.engine.TaskService; import com.camunda.fox.tasklist.api.TaskListGroup; import com.camunda.fox.tasklist.api.TaskListIdentity; import com.camunda.fox.tasklist.api.TaskNavigationLink; import com.camunda.fox.tasklist.api.TasklistIdentityService; import com.camunda.fox.tasklist.api.TasklistUser; import com.camunda.fox.tasklist.event.SignOutEvent; import com.camunda.fox.tasklist.event.TaskNavigationLinkSelectedEvent; @Named @ViewScoped public class TaskNavigation implements Serializable { private static final Logger log = Logger.getLogger(TaskNavigation.class.getCanonicalName()); private static final long serialVersionUID = 1L; @Inject private TaskListIdentity currentIdentity; @Inject private TaskService taskService; @Inject private TasklistIdentityService tasklistIdentityService; @Inject private Event<TaskNavigationLinkSelectedEvent> taskNavigationLinkSelectedEvent; private MyTasksLink myTasksLink; private UnassignedTasksLink unassignedTasksLink; private List<GroupTasksLink> groupTasksLinks; private List<ColleaguesTasksLink> colleaguesTasksLinks; private TaskNavigationLink selected; @PostConstruct protected void init() { log.finest("initializing " + this.getClass().getSimpleName() + " (" + this + ")"); selected = getMyTasksLink(); selected.setActive(true); } public MyTasksLink getMyTasksLink() { if (myTasksLink == null) { long personalTasksCount = taskService.createTaskQuery().taskAssignee(currentIdentity.getCurrentUser().getUsername()).count(); myTasksLink = new MyTasksLink("My Tasks (" + personalTasksCount + ")", personalTasksCount, false); } return myTasksLink; } public UnassignedTasksLink 
getUnassignedTasksLink() { if (unassignedTasksLink == null) { long unassignedTasksCount = taskService.createTaskQuery().taskCandidateUser(currentIdentity.getCurrentUser().getUsername()).count(); unassignedTasksLink = new UnassignedTasksLink("Unassigned Tasks (" + unassignedTasksCount + ")", unassignedTasksCount, false); } return unassignedTasksLink; } public List<GroupTasksLink> getGroupTasksLinks() { if (groupTasksLinks == null) { groupTasksLinks = new ArrayList<GroupTasksLink>(); List<TaskListGroup> groups = tasklistIdentityService.getGroupsByUserId(currentIdentity.getCurrentUser().getUsername()); for (TaskListGroup taskListGroup : groups) { long groupTasksCount = taskService.createTaskQuery().taskCandidateGroup(taskListGroup.getGroupId()).count(); GroupTasksLink gourpLink = new GroupTasksLink(taskListGroup.getGroupName() + " (" + groupTasksCount + ")", groupTasksCount, taskListGroup.getGroupId(), false); groupTasksLinks.add(gourpLink); } } return groupTasksLinks; } public List<ColleaguesTasksLink> getColleaguesTasksLinks() { if (colleaguesTasksLinks == null) { colleaguesTasksLinks = new ArrayList<ColleaguesTasksLink>(); List<TasklistUser> colleagues = tasklistIdentityService.getColleaguesByUserId(currentIdentity.getCurrentUser().getUsername()); for (TasklistUser colleague : colleagues) { long colleagueTasksCount = taskService.createTaskQuery().taskAssignee(colleague.getUsername()).count(); ColleaguesTasksLink colleaguesLink = new ColleaguesTasksLink(colleague.getFirstname() + " " + colleague.getLastname() + " (" + colleagueTasksCount + ")", colleagueTasksCount, colleague.getUsername(), false); colleaguesTasksLinks.add(colleaguesLink); } } return colleaguesTasksLinks; } public void select(@Observes TaskNavigationLinkSelectedEvent taskNavigationLinkSelectedEvent) { if (!taskNavigationLinkSelectedEvent.getLink().equals(selected)) { if (selected != null) { selected.setActive(false); } selected = taskNavigationLinkSelectedEvent.getLink(); selected.setActive(true); } 
} public void reset(@SuppressWarnings("unused") @Observes SignOutEvent signOutEvent) { myTasksLink = null; unassignedTasksLink = null; groupTasksLinks = null; } public void select(TaskNavigationLink link) { log.finest("Menu entry " + link + " was selected, firing TaskNavigationLinkSelectedEvent."); taskNavigationLinkSelectedEvent.fire(new TaskNavigationLinkSelectedEvent(link)); } }
HEMERA-2460: avoided overloaded method name in tasklist to make it work on Glassfish
webapps/tasklist/tasklist-webapp/src/main/java/com/camunda/fox/tasklist/TaskNavigation.java
HEMERA-2460: avoided overloaded method name in tasklist to make it work on Glassfish
Java
apache-2.0
6e5fc123db9c5213a92493f48f9514fa5f9fa455
0
LeonoraG/s-case-core,s-case/s-case-core,LeonoraG/s-case-core
package eu.scasefp7.eclipse.core.ontology; /** * Class containing the source of the three ontologies. * * @author themis */ public class OntologySource { /** * Enumeration for the three types of ontologies. */ public enum OntologyType { /** Static ontology */ STATIC, /** Dynamic ontology */ DYNAMIC, /** Linked ontology */ LINKED } /** * Returns an ontology instance given its type. * * @param ontologyType the type of the ontology to be returned. * @return the instance of the ontology. */ public static final String getOntology(OntologyType ontologyType) { switch (ontologyType) { case STATIC: return StaticOntology; case DYNAMIC: return DynamicOntology; case LINKED: return LinkedOntology; default: return null; } } /** * The string of the static ontology. */ public final static String StaticOntology = "<?xml version=\"1.0\"?>\n" + "<rdf:RDF\n" + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n" + "xmlns:protege=\"http://protege.stanford.edu/plugins/owl/protege#\"\n" + "xmlns:xsp=\"http://www.owl-ontologies.com/2005/08/07/xsp.owl#\"\n" + "xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n" + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n" + "xmlns:swrl=\"http://www.w3.org/2003/11/swrl#\"\n" + "xmlns:swrlb=\"http://www.w3.org/2003/11/swrlb#\"\n" + "xmlns=\"http://www.owl-ontologies.com/Ontology1273059028.owl#\"\n" + "xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\"\n" + "xml:base=\"file:/C:/Users/themis/Documents/SCASE/UEDINREQS/NewOntology/ParseIt/ImportAll/requirements.owl\">\n" + "<owl:Ontology rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</rdfs:subClassOf>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + "</owl:disjointWith>\n" + 
"<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "</owl:disjointWith>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:disjointWith>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">refers to actions - mainly transitive verbs</rdfs:comment>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\">\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + "</owl:disjointWith>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:disjointWith>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">refers to events ; usually expressed through verbs like 'happen', 'occur', 'arrive'</rdfs:comment>\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#manner\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + 
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#direction\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#goal\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</rdfs:subClassOf>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">states a condition or situation - 'to be', 'to appear'</rdfs:comment>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:disjointWith>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#source\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#extent\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#modality\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#time\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#useractor\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#location\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\">\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">refers to the verb phrase of the requirement</rdfs:comment>\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#system\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#external_system\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\">\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">describes part-of relationships</rdfs:comment>\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#theme\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:Class>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_concept_of_requirement\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_concept\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:subPropertyOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\"/>\n" + "</rdfs:subPropertyOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:ObjectProperty>\n" + 
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\"/>\n" + "</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owned_by\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owns\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#source\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_requirement\">\n" + 
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_project\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_related_to\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#relates_to\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#source\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\"/>\n" + 
"</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_project\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_goal\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_goal_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#goal\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#occurs\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#acts_on\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#receives_action\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + 
"</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of_requirement\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_operation\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:subPropertyOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_goal_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#goal\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_goal\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" 
+ "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_concept\">\n" + "<rdfs:subPropertyOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\"/>\n" + "</rdfs:subPropertyOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_concept_of_requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owns\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owned_by\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#receives_action\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + 
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#acts_on\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_compound_requirement_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#relates_to\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_related_to\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_operation\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:subPropertyOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of_requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#contains_negation\">\n" + "<rdfs:range>\n" + 
"<owl:DataRange>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">true</rdf:first>\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">false</rdf:first>\n" + "</owl:oneOf>\n" + "</owl:DataRange>\n" + "</rdfs:range>\n" + "</owl:DatatypeProperty>\n" + "</rdf:RDF>\n" + "\n" + "<!-- Created with Protege (with OWL Plugin 3.5, Build 663) http://protege.stanford.edu -->\n"; /** * The string of the dynamic ontology. */ public final static String DynamicOntology = "<?xml version=\"1.0\"?>\n" + "<rdf:RDF\n" + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n" + "xmlns:swrlb=\"http://www.w3.org/2003/11/swrlb#\"\n" + "xmlns=\"http://www.owl-ontologies.com/Ontology1273059028.owl#\"\n" + "xmlns:xsp=\"http://www.owl-ontologies.com/2005/08/07/xsp.owl#\"\n" + "xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n" + "xmlns:protege=\"http://protege.stanford.edu/plugins/owl/protege#\"\n" + "xmlns:swrl=\"http://www.w3.org/2003/11/swrl#\"\n" + "xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\"\n" + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n" + "xml:base=\"file:/C:/Users/themis/workspaceSCase/ontologies/DynamicOntology.owl\">\n" + "<owl:Ontology rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" + "</rdfs:subClassOf>\n" + 
"</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PreCondition\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Object\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InitialActivity\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PostCondition\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InitialActivity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_object_of_activity\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_object\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Object\"/>\n" + "<rdfs:range 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_diagram\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_diagram_of_project\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_satisfies_condition\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#condition_is_satisfied_by_activity\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_condition\">\n" + 
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_activity\"/>\n" + "</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_opposite_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_opposite_of\"/>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#SymmetricProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_activity\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_condition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has_condition\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_diagram\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PreCondition\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PostCondition\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_target\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_target_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\">\n" + 
"<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#condition_is_satisfied_by_activity\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_satisfies_condition\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InitialActivity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_action\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of_activity\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_diagram_of_project\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_diagram\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_action\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_property\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "</owl:ObjectProperty>\n" + 
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_object\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Object\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_object_of_activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_diagram\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PreCondition\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PostCondition\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has_condition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_target_of\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_target\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_property\">\n" + "<rdfs:range 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of_activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_diagram\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_diagram\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activitytype\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DataRange>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">false</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">true</rdf:first>\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "</owl:DataRange>\n" + "</rdf:RDF>\n" + "\n" + "<!-- Created with Protege (with OWL Plugin 3.5, Build 663) http://protege.stanford.edu -->\n"; /** * The string of the linked ontology. 
*/ public final static String LinkedOntology = "<?xml version=\"1.0\"?>\n" + "<rdf:RDF\n" + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n" + "xmlns:swrlb=\"http://www.w3.org/2003/11/swrlb#\"\n" + "xmlns=\"http://www.owl-ontologies.com/Ontology1273059028.owl#\"\n" + "xmlns:xsp=\"http://www.owl-ontologies.com/2005/08/07/xsp.owl#\"\n" + "xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n" + "xmlns:protege=\"http://protege.stanford.edu/plugins/owl/protege#\"\n" + "xmlns:swrl=\"http://www.w3.org/2003/11/swrl#\"\n" + "xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\"\n" + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n" + "xml:base=\"file:/C:/Users/user/Desktop/ontologies/LinkedOntology.owl\">\n" + "<owl:Ontology rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\">\n" + "<rdfs:subClassOf 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf 
rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + "></rdfs:comment>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PrimitiveDatatype\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">false</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">true</rdf:first>\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">String</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Integer</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Double</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Char</rdf:first>\n" + "</rdf:rest>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Boolean</rdf:first>\n" + "</rdf:rest>\n" + 
"<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Float</rdf:first>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "</owl:Class>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_query_parameters\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_query_parameter_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_action\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_query_parameter_of\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_query_parameters\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_input\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_input_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_output\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_output_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_input_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_input\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_elements_of\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PrimitiveDatatype\"/>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_elements\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_previous_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_next_activity\"/>\n" + "</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_element_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_element\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity_diagram\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_diagram_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_of\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_operation\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_operation\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\">\n" + "<rdfs:range 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_element\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_element_of\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" + 
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_action\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_elements\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PrimitiveDatatype\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_elements_of\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_output_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_output\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_diagram_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity_diagram\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#element_is_contained_in\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#contains_element\"/>\n" + "</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_requirement_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_requirement\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_requirement\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_requirement_of\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#contains_element\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#element_is_contained_in\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + 
"</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_next_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_previous_activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasURIParameters\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasName\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activitytype\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isType\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Primitive</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Object</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Array</rdf:first>\n" + "</owl:oneOf>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasResourcePath\">\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#belongsToURL\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + 
"<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasResponseType\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">XML</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">JSON</rdf:first>\n" + "</owl:oneOf>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#belongsToWSType\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">RESTful</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">SOAP</rdf:first>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + 
"</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isExternalService\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#boolean\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasCRUDVerb\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">GET</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">PUT</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">POST</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">DELETE</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">UPDATE</rdf:first>\n" + "</rdf:rest>\n" + "</rdf:rest>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:FunctionalProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isOptional\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DatatypeProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#boolean\"/>\n" + "<rdfs:domain 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:FunctionalProperty>\n" + "<owl:FunctionalProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isAuthToken\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DatatypeProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#boolean\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:FunctionalProperty>\n" + "<owl:FunctionalProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasDefaultValue\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DatatypeProperty\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:FunctionalProperty>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#integer\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#string\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#char\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#boolean\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#float\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#double\"/>\n" + "</rdf:RDF>\n" + "\n" + "<!-- Created with Protege (with OWL Plugin 3.5, Build 663) http://protege.stanford.edu -->\n"; }
eu.scasefp7.eclipse.core/src/eu/scasefp7/eclipse/core/ontology/OntologySource.java
package eu.scasefp7.eclipse.core.ontology; /** * Class containing the source of the three ontologies. * * @author themis */ public class OntologySource { /** * Enumeration for the three types of ontologies. */ public enum OntologyType { /** Static ontology */ STATIC, /** Dynamic ontology */ DYNAMIC, /** Linked ontology */ LINKED } /** * Returns an ontology instance given its type. * * @param ontologyType the type of the ontology to be returned. * @return the instance of the ontology. */ public static final String getOntology(OntologyType ontologyType) { switch (ontologyType) { case STATIC: return StaticOntology; case DYNAMIC: return DynamicOntology; case LINKED: return LinkedOntology; default: return null; } } /** * The string of the static ontology. */ public final static String StaticOntology = "<?xml version=\"1.0\"?>\n" + "<rdf:RDF\n" + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n" + "xmlns:protege=\"http://protege.stanford.edu/plugins/owl/protege#\"\n" + "xmlns:xsp=\"http://www.owl-ontologies.com/2005/08/07/xsp.owl#\"\n" + "xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n" + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n" + "xmlns:swrl=\"http://www.w3.org/2003/11/swrl#\"\n" + "xmlns:swrlb=\"http://www.w3.org/2003/11/swrlb#\"\n" + "xmlns=\"http://www.owl-ontologies.com/Ontology1273059028.owl#\"\n" + "xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\"\n" + "xml:base=\"file:/C:/Users/themis/Documents/SCASE/UEDINREQS/NewOntology/ParseIt/ImportAll/requirements.owl\">\n" + "<owl:Ontology rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</rdfs:subClassOf>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + "</owl:disjointWith>\n" + 
"<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "</owl:disjointWith>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:disjointWith>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">refers to actions - mainly transitive verbs</rdfs:comment>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\">\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + "</owl:disjointWith>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:disjointWith>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">refers to events ; usually expressed through verbs like 'happen', 'occur', 'arrive'</rdfs:comment>\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#manner\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + 
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#direction\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#goal\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</rdfs:subClassOf>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">states a condition or situation - 'to be', 'to appear'</rdfs:comment>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:disjointWith>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:disjointWith>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#source\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#extent\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#modality\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#time\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#useractor\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#location\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\">\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">refers to the verb phrase of the requirement</rdfs:comment>\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#system\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#external_system\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\">\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#state\"/>\n" + "<owl:disjointWith rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">describes part-of relationships</rdfs:comment>\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#theme\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:Class>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_concept_of_requirement\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_concept\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:subPropertyOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\"/>\n" + "</rdfs:subPropertyOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "</owl:ObjectProperty>\n" + 
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\"/>\n" + "</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#actor\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owned_by\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owns\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#source\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_requirement\">\n" + 
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_project\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_related_to\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#relates_to\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#source\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\"/>\n" + 
"</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_project\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_goal\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_goal_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#goal\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#occurs\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#emergence\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#acts_on\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#receives_action\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + 
"</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of_requirement\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_operation\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:subPropertyOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_goal_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#goal\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_goal\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" 
+ "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_concept\">\n" + "<rdfs:subPropertyOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\"/>\n" + "</rdfs:subPropertyOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_concept_of_requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owns\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#owned_by\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ownership\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ThingType\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#consist_requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#receives_action\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + 
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#property\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#action\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#acts_on\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_compound_requirement_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#relates_to\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_related_to\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_has_operation\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:subPropertyOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#requirement_consists_of\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of_requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OperationType\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#contains_negation\">\n" + "<rdfs:range>\n" + 
"<owl:DataRange>\n" +
"<owl:oneOf rdf:parseType=\"Resource\">\n" +
"<rdf:rest rdf:parseType=\"Resource\">\n" +
"<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" +
">true</rdf:first>\n" +
"<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" +
"</rdf:rest>\n" +
"<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" +
">false</rdf:first>\n" +
"</owl:oneOf>\n" +
"</owl:DataRange>\n" +
"</rdfs:range>\n" +
"</owl:DatatypeProperty>\n" +
"</rdf:RDF>\n" +
"\n" +
"<!-- Created with Protege (with OWL Plugin 3.5, Build 663) http://protege.stanford.edu -->\n";

/**
 * The string of the dynamic ontology.
 * <p>
 * RDF/XML (OWL) serialization of the dynamic ontology, embedded as a string
 * constant so the schema travels with the code. It declares the
 * activity-diagram vocabulary as subclasses of Concept (ActivityDiagram,
 * Activity, InitialActivity, FinalActivity, Actor, Action, Object, Property,
 * Project, Transition, and Condition with its Pre/Post/Guard variants) plus
 * the object properties relating them (has_source/has_target, has_actor,
 * activity_has_action, activity_has_object, diagram_has, etc., each with its
 * declared inverse) and the "activitytype" datatype property.
 * Exported from Protege (OWL Plugin 3.5, Build 663); the xml:base still
 * points at the original developer's machine path and carries no runtime
 * meaning. NOTE(review): presumably parsed by an OWL/RDF reader elsewhere in
 * the project -- confirm before editing, and keep the XML content intact
 * byte for byte.
 */
public final static String DynamicOntology = "<?xml version=\"1.0\"?>\n" +
"<rdf:RDF\n" +
"xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n" +
"xmlns:swrlb=\"http://www.w3.org/2003/11/swrlb#\"\n" +
"xmlns=\"http://www.owl-ontologies.com/Ontology1273059028.owl#\"\n" +
"xmlns:xsp=\"http://www.owl-ontologies.com/2005/08/07/xsp.owl#\"\n" +
"xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n" +
"xmlns:protege=\"http://protege.stanford.edu/plugins/owl/protege#\"\n" +
"xmlns:swrl=\"http://www.w3.org/2003/11/swrl#\"\n" +
"xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\"\n" +
"xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n" +
"xml:base=\"file:/C:/Users/themis/workspaceSCase/ontologies/DynamicOntology.owl\">\n" +
"<owl:Ontology rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\">\n" +
"<rdfs:subClassOf>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</rdfs:subClassOf>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\">\n" +
"<rdfs:subClassOf>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" +
"</rdfs:subClassOf>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PreCondition\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Object\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InitialActivity\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PostCondition\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" +
"</owl:Class>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\">\n" +
"<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" +
"</owl:Class>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\">\n" +
"<rdfs:range>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InitialActivity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:range>\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_object_of_activity\">\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_object\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Object\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_diagram\">\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_diagram_of_project\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\">\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:domain>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_satisfies_condition\">\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#condition_is_satisfied_by_activity\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_condition\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_activity\"/>\n" +
"</owl:inverseOf>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_opposite_of\">\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_opposite_of\"/>\n" +
"<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#SymmetricProperty\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_actor_of\">\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" +
"<rdfs:range>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:range>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_actor\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_activity\">\n" +
"<rdfs:range>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:range>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_condition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has_condition\">\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_diagram\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:range>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PreCondition\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PostCondition\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:range>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_target\">\n" +
"<rdfs:range>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:range>\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_target_of\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\">\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#condition_is_satisfied_by_activity\">\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_satisfies_condition\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_source_of\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"<rdfs:domain>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InitialActivity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:domain>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_source\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_action\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of_activity\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#GuardCondition\"/>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_diagram_of_project\">\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#project_has_diagram\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of_activity\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_action\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of_activity\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_property\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_object\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Object\"/>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_object_of_activity\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of_diagram\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" +
"<rdfs:domain>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PreCondition\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PostCondition\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:domain>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has_condition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_target_of\">\n" +
"<rdfs:domain>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#FinalActivity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:domain>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_target\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activity_has_property\">\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of_activity\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has\">\n" +
"<owl:inverseOf>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_diagram\"/>\n" +
"</owl:inverseOf>\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" +
"<rdfs:range>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:range>\n" +
"</owl:ObjectProperty>\n" +
"<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_of_diagram\">\n" +
"<rdfs:domain>\n" +
"<owl:Class>\n" +
"<owl:unionOf rdf:parseType=\"Collection\">\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Actor\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Transition\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" +
"<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#AnyActivity\"/>\n" +
"</owl:unionOf>\n" +
"</owl:Class>\n" +
"</rdfs:domain>\n" +
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#diagram_has\"/>\n" +
"<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" +
"</owl:ObjectProperty>\n" +
"<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activitytype\">\n" +
"<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" +
"</owl:DatatypeProperty>\n" +
"<owl:DataRange>\n" +
"<owl:oneOf rdf:parseType=\"Resource\">\n" +
"<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" +
">false</rdf:first>\n" +
"<rdf:rest rdf:parseType=\"Resource\">\n" +
"<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" +
">true</rdf:first>\n" +
"<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" +
"</rdf:rest>\n" +
"</owl:oneOf>\n" +
"</owl:DataRange>\n" +
"</rdf:RDF>\n" +
"\n" +
"<!-- Created with Protege (with OWL Plugin 3.5, Build 663) http://protege.stanford.edu -->\n";

/**
 * The string of the linked ontology.
*/ public final static String LinkedOntology = "<?xml version=\"1.0\"?>\n" + "<rdf:RDF\n" + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n" + "xmlns:swrlb=\"http://www.w3.org/2003/11/swrlb#\"\n" + "xmlns=\"http://www.owl-ontologies.com/Ontology1273059028.owl#\"\n" + "xmlns:xsp=\"http://www.owl-ontologies.com/2005/08/07/xsp.owl#\"\n" + "xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n" + "xmlns:protege=\"http://protege.stanford.edu/plugins/owl/protege#\"\n" + "xmlns:swrl=\"http://www.w3.org/2003/11/swrl#\"\n" + "xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\"\n" + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n" + "xml:base=\"file:/C:/Users/user/Desktop/ontologies/LinkedOntology.owl\">\n" + "<owl:Ontology rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\">\n" + "<rdfs:subClassOf>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</rdfs:subClassOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\">\n" + "<rdfs:subClassOf 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Concept\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf 
rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "<rdfs:comment rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + "></rdfs:comment>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:Class>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PrimitiveDatatype\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:Class>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\">\n" + "<rdfs:subClassOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">false</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#boolean\"\n" + ">true</rdf:first>\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "<owl:Class>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">String</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Integer</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Double</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Char</rdf:first>\n" + "</rdf:rest>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Boolean</rdf:first>\n" + "</rdf:rest>\n" + 
"<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Float</rdf:first>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "</owl:Class>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_query_parameters\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_query_parameter_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_action\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_query_parameter_of\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_query_parameters\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_input\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_input_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_output\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_output_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_input_of\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_input\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_elements_of\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PrimitiveDatatype\"/>\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_elements\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_previous_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_next_activity\"/>\n" + "</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_element_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_element\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity_diagram\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_diagram_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_of\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_operation\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_operation_of\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_operation\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_condition\">\n" + "<rdfs:range 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Condition\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_condition_of\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_element\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_element_of\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_property_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_property\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Property\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_action_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Action\"/>\n" + 
"<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_action\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_elements\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#PrimitiveDatatype\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Representation\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_elements_of\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_output_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_output\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_activity_diagram_of\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_activity_diagram\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#element_is_contained_in\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#contains_element\"/>\n" + "</owl:inverseOf>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_requirement_of\">\n" + "<owl:inverseOf>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_requirement\"/>\n" + "</owl:inverseOf>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_requirement\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#is_requirement_of\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Project\"/>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#contains_element\">\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#element_is_contained_in\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Requirement\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#ActivityDiagram\"/>\n" + 
"</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Element\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:ObjectProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_next_activity\">\n" + "<rdfs:range rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "<owl:inverseOf rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#has_previous_activity\"/>\n" + "</owl:ObjectProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasURIParameters\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasName\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#activitytype\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Activity\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty 
rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isType\">\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + "<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#OutputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Primitive</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Object</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">Array</rdf:first>\n" + "</owl:oneOf>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasResourcePath\">\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#belongsToURL\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#string\"/>\n" + "<rdfs:domain>\n" + "<owl:Class>\n" + 
"<owl:unionOf rdf:parseType=\"Collection\">\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<owl:Class rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:unionOf>\n" + "</owl:Class>\n" + "</rdfs:domain>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasResponseType\">\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">XML</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">JSON</rdf:first>\n" + "</owl:oneOf>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#belongsToWSType\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">RESTful</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">SOAP</rdf:first>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + 
"</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isExternalService\">\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Resource\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#boolean\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:DatatypeProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasCRUDVerb\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#FunctionalProperty\"/>\n" + "<rdfs:range>\n" + "<owl:Class>\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DataRange\"/>\n" + "<owl:oneOf rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">GET</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">PUT</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">POST</rdf:first>\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:parseType=\"Resource\">\n" + "<rdf:rest rdf:resource=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#nil\"/>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">DELETE</rdf:first>\n" + "</rdf:rest>\n" + "<rdf:first rdf:datatype=\"http://www.w3.org/2001/XMLSchema#string\"\n" + ">UPDATE</rdf:first>\n" + "</rdf:rest>\n" + "</rdf:rest>\n" + "</rdf:rest>\n" + "</owl:oneOf>\n" + "</owl:Class>\n" + "</rdfs:range>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Operation\"/>\n" + "</owl:DatatypeProperty>\n" + "<owl:FunctionalProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isOptional\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DatatypeProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#boolean\"/>\n" + "<rdfs:domain 
rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:FunctionalProperty>\n" + "<owl:FunctionalProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#isAuthToken\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DatatypeProperty\"/>\n" + "<rdfs:range rdf:resource=\"http://www.w3.org/2001/XMLSchema#boolean\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:FunctionalProperty>\n" + "<owl:FunctionalProperty rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#hasDefaultValue\">\n" + "<rdf:type rdf:resource=\"http://www.w3.org/2002/07/owl#DatatypeProperty\"/>\n" + "<rdfs:domain rdf:resource=\"http://www.owl-ontologies.com/Ontology1273059028.owl#InputRepresentation\"/>\n" + "</owl:FunctionalProperty>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#object\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#integer\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#string\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#char\"/>\n" + "<Project rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#Restmarks\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#boolean\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#float\"/>\n" + "<PrimitiveDatatype rdf:about=\"http://www.owl-ontologies.com/Ontology1273059028.owl#double\"/>\n" + "</rdf:RDF>\n" + "\n" + "<!-- Created with Protege (with OWL Plugin 3.5, Build 663) http://protege.stanford.edu -->\n"; }
Minor fix in linked ontology source for including default project
eu.scasefp7.eclipse.core/src/eu/scasefp7/eclipse/core/ontology/OntologySource.java
Minor fix in linked ontology source for including default project
Java
apache-2.0
9e42dc3d6050bb07008fcb991826c826de942fa5
0
azaytsev/ios-driver,azaytsev/ios-driver,masbog/ios-driver,masbog/ios-driver,shutkou/ios-driver,adataylor/ios-driver,azaytsev/ios-driver,seem-sky/ios-driver,ios-driver/ios-driver,shutkou/ios-driver,darraghgrace/ios-driver,ios-driver/ios-driver,azaytsev/ios-driver,seem-sky/ios-driver,darraghgrace/ios-driver,darraghgrace/ios-driver,adataylor/ios-driver,crashlytics/ios-driver,shutkou/ios-driver,seem-sky/ios-driver,azaytsev/ios-driver,crashlytics/ios-driver,shutkou/ios-driver,shutkou/ios-driver,adataylor/ios-driver,adataylor/ios-driver,masbog/ios-driver,masbog/ios-driver,darraghgrace/ios-driver,ios-driver/ios-driver,ios-driver/ios-driver,masbog/ios-driver,ios-driver/ios-driver,adataylor/ios-driver,crashlytics/ios-driver,shutkou/ios-driver,crashlytics/ios-driver,darraghgrace/ios-driver,seem-sky/ios-driver,crashlytics/ios-driver
package org.uiautomation.ios.server.command.uiautomation; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.uiautomation.ios.communication.WebDriverLikeRequest; import org.uiautomation.ios.server.IOSDriver; import org.uiautomation.ios.server.command.UIAScriptHandler; public class SetValueNativeHandler extends UIAScriptHandler{ private static final String voidTemplate = "var parent = UIAutomation.cache.get(:reference);" + "parent:jsMethod;" + "UIAutomation.createJSONResponse(':sessionId',0,'')"; public SetValueNativeHandler(IOSDriver driver, WebDriverLikeRequest request) { super(driver, request); try { JSONArray array =request.getPayload().getJSONArray("value"); String value = array.getString(0); String corrected = value.replaceAll("\\\\", "\\\\\\\\"); corrected = corrected.replaceAll("\\n", "\\\\n"); corrected = corrected.replaceAll("\\t", "\\\\t"); String js = voidTemplate .replace(":sessionId", request.getSession()) .replace(":reference", request.getVariableValue(":reference")) .replace(":jsMethod", ".setValue('"+corrected+"')"); setJS(js); } catch (JSONException e) { e.printStackTrace(); } } @Override public JSONObject configurationDescription() throws JSONException { return noConfigDefined(); } }
server/src/main/java/org/uiautomation/ios/server/command/uiautomation/SetValueNativeHandler.java
package org.uiautomation.ios.server.command.uiautomation; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.uiautomation.ios.communication.WebDriverLikeRequest; import org.uiautomation.ios.server.IOSDriver; import org.uiautomation.ios.server.command.UIAScriptHandler; public class SetValueNativeHandler extends UIAScriptHandler{ private static final String voidTemplate = "var parent = UIAutomation.cache.get(:reference);" + "parent:jsMethod;" + "UIAutomation.createJSONResponse(':sessionId',0,'')"; public SetValueNativeHandler(IOSDriver driver, WebDriverLikeRequest request) { super(driver, request); try { JSONArray array =request.getPayload().getJSONArray("value"); String value = array.getString(0); String corrected = value.replaceAll("\\\\", "\\\\\\\\"); corrected = corrected.replaceAll("\\n", "\\\\n"); corrected = corrected.replaceAll("\\t", "\\\\t"); String js = voidTemplate .replace(":sessionId", request.getSession()) .replace(":reference", request.getVariableValue(":reference")) .replace(":jsMethod", ".setValue('"+corrected+"')"); System.out.println(js); setJS(js); } catch (JSONException e) { e.printStackTrace(); } } @Override public JSONObject configurationDescription() throws JSONException { return noConfigDefined(); } }
removing sysout
server/src/main/java/org/uiautomation/ios/server/command/uiautomation/SetValueNativeHandler.java
removing sysout
Java
apache-2.0
9ba6e54bd9386109d8c955debccb3d5d083f121b
0
HubSpot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,HubSpot/Singularity
package com.hubspot.singularity.mesos; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import javax.inject.Singleton; import org.apache.mesos.v1.Protos; import org.apache.mesos.v1.Protos.AgentID; import org.apache.mesos.v1.Protos.ExecutorID; import org.apache.mesos.v1.Protos.InverseOffer; import org.apache.mesos.v1.Protos.MasterInfo; import org.apache.mesos.v1.Protos.Offer; import org.apache.mesos.v1.Protos.OfferID; import org.apache.mesos.v1.Protos.TaskID; import org.apache.mesos.v1.Protos.TaskStatus; import org.apache.mesos.v1.scheduler.Protos.Event; import org.apache.mesos.v1.scheduler.Protos.Event.Failure; import org.apache.mesos.v1.scheduler.Protos.Event.Message; import org.apache.mesos.v1.scheduler.Protos.Event.Subscribed; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.codahale.metrics.annotation.Timed; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.inject.Inject; import com.google.inject.name.Named; import com.hubspot.mesos.JavaUtils; import com.hubspot.singularity.RequestCleanupType; import com.hubspot.singularity.SingularityAbort; import com.hubspot.singularity.SingularityAbort.AbortReason; import com.hubspot.singularity.SingularityAction; import com.hubspot.singularity.SingularityKilledTaskIdRecord; import com.hubspot.singularity.SingularityMainModule; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskDestroyFrameworkMessage; import 
com.hubspot.singularity.SingularityTaskId; import com.hubspot.singularity.TaskCleanupType; import com.hubspot.singularity.config.MesosConfiguration; import com.hubspot.singularity.config.SingularityConfiguration; import com.hubspot.singularity.data.DisasterManager; import com.hubspot.singularity.data.TaskManager; import com.hubspot.singularity.data.transcoders.Transcoder; import com.hubspot.singularity.helpers.MesosProtosUtils; import com.hubspot.singularity.helpers.MesosUtils; import com.hubspot.singularity.mesos.SingularityOfferCache.CachedOffer; import com.hubspot.singularity.mesos.SingularitySlaveAndRackManager.CheckResult; import com.hubspot.singularity.scheduler.SingularityLeaderCacheCoordinator; import com.hubspot.singularity.sentry.SingularityExceptionNotifier; import io.netty.handler.codec.PrematureChannelClosureException; @Singleton public class SingularityMesosSchedulerImpl extends SingularityMesosScheduler { private static final Logger LOG = LoggerFactory.getLogger(SingularityMesosScheduler.class); private final SingularityExceptionNotifier exceptionNotifier; private final SingularityStartup startup; private final SingularityAbort abort; private final SingularityLeaderCacheCoordinator leaderCacheCoordinator; private final SingularityMesosFrameworkMessageHandler messageHandler; private final SingularitySlaveAndRackManager slaveAndRackManager; private final DisasterManager disasterManager; private final OfferCache offerCache; private final SingularityMesosOfferScheduler offerScheduler; private final SingularityMesosStatusUpdateHandler statusUpdateHandler; private final SingularityMesosSchedulerClient mesosSchedulerClient; private final boolean offerCacheEnabled; private final boolean delayWhenStatusUpdateDeltaTooLarge; private final long delayWhenDeltaOverMs; private final AtomicLong statusUpdateDeltaAvg; private final AtomicLong lastHeartbeatTime; private final SingularityConfiguration configuration; private final TaskManager taskManager; private final 
Transcoder<SingularityTaskDestroyFrameworkMessage> transcoder; private final SingularitySchedulerLock lock; private volatile SchedulerState state; private Optional<Long> lastOfferTimestamp = Optional.absent(); private Optional<Double> heartbeatIntervalSeconds = Optional.absent(); private final AtomicReference<MasterInfo> masterInfo = new AtomicReference<>(); private final List<TaskStatus> queuedUpdates; @Inject SingularityMesosSchedulerImpl(SingularitySchedulerLock lock, SingularityExceptionNotifier exceptionNotifier, SingularityStartup startup, SingularityLeaderCacheCoordinator leaderCacheCoordinator, SingularityAbort abort, SingularityMesosFrameworkMessageHandler messageHandler, SingularitySlaveAndRackManager slaveAndRackManager, OfferCache offerCache, SingularityMesosOfferScheduler offerScheduler, SingularityMesosStatusUpdateHandler statusUpdateHandler, SingularityMesosSchedulerClient mesosSchedulerClient, DisasterManager disasterManager, SingularityConfiguration configuration, TaskManager taskManager, Transcoder<SingularityTaskDestroyFrameworkMessage> transcoder, @Named(SingularityMainModule.STATUS_UPDATE_DELTA_30S_AVERAGE) AtomicLong statusUpdateDeltaAvg, @Named(SingularityMainModule.LAST_MESOS_MASTER_HEARTBEAT_TIME) AtomicLong lastHeartbeatTime) { this.exceptionNotifier = exceptionNotifier; this.startup = startup; this.abort = abort; this.messageHandler = messageHandler; this.slaveAndRackManager = slaveAndRackManager; this.disasterManager = disasterManager; this.offerCache = offerCache; this.offerScheduler = offerScheduler; this.statusUpdateHandler = statusUpdateHandler; this.mesosSchedulerClient = mesosSchedulerClient; this.offerCacheEnabled = configuration.isCacheOffers(); this.delayWhenStatusUpdateDeltaTooLarge = configuration.isDelayOfferProcessingForLargeStatusUpdateDelta(); this.delayWhenDeltaOverMs = configuration.getDelayPollersWhenDeltaOverMs(); this.statusUpdateDeltaAvg = statusUpdateDeltaAvg; this.lastHeartbeatTime = lastHeartbeatTime; 
this.taskManager = taskManager; this.transcoder = transcoder; this.leaderCacheCoordinator = leaderCacheCoordinator; this.queuedUpdates = Lists.newArrayList(); this.lock = lock; this.state = SchedulerState.NOT_STARTED; this.configuration = configuration; } @Override public void subscribed(Subscribed subscribed) { callWithStateLock(() -> { Preconditions.checkState(state == SchedulerState.NOT_STARTED, "Asked to startup - but in invalid state: %s", state.name()); double advertisedHeartbeatIntervalSeconds = subscribed.getHeartbeatIntervalSeconds(); if (advertisedHeartbeatIntervalSeconds > 0) { heartbeatIntervalSeconds = Optional.of(advertisedHeartbeatIntervalSeconds); } // Should be called before activation of leader cache or cache could be left empty startup.checkMigrations(); leaderCacheCoordinator.activateLeaderCache(); MasterInfo newMasterInfo = subscribed.getMasterInfo(); masterInfo.set(newMasterInfo); startup.startup(newMasterInfo); state = SchedulerState.SUBSCRIBED; queuedUpdates.forEach(this::handleStatusUpdateAsync); }, "subscribed", false); } @Timed @Override public void resourceOffers(List<Offer> offers) { if (!isRunning()) { LOG.info("Scheduler is in state {}, declining {} offer(s)", state.name(), offers.size()); mesosSchedulerClient.decline(offers.stream().map(Offer::getId).collect(Collectors.toList())); return; } callWithOffersLock(() -> { final long start = System.currentTimeMillis(); lastOfferTimestamp = Optional.of(start); LOG.info("Received {} offer(s)", offers.size()); boolean delclineImmediately = false; if (disasterManager.isDisabled(SingularityAction.PROCESS_OFFERS)) { LOG.info("Processing offers is currently disabled, declining {} offers", offers.size()); delclineImmediately = true; } if (delayWhenStatusUpdateDeltaTooLarge && statusUpdateDeltaAvg.get() > delayWhenDeltaOverMs) { LOG.info("Status update delta is too large ({}), declining offers while status updates catch up", statusUpdateDeltaAvg.get()); delclineImmediately = true; } if 
(delclineImmediately) { mesosSchedulerClient.decline(offers.stream().map(Offer::getId).collect(Collectors.toList())); return; } if (offerCacheEnabled) { if (disasterManager.isDisabled(SingularityAction.CACHE_OFFERS)) { offerCache.disableOfferCache(); } else { offerCache.enableOfferCache(); } } List<Offer> offersToCheck = new ArrayList<>(offers); List<CachedOffer> cachedOfferList = offerCache.checkoutOffers(); Map<String, CachedOffer> cachedOffers = new HashMap<>(); for (CachedOffer cachedOffer : cachedOfferList) { cachedOffers.put(cachedOffer.getOfferId(), cachedOffer); offersToCheck.add(cachedOffer.getOffer()); } offers.parallelStream().forEach((offer) -> { if (offer.getId() == null) { LOG.warn("Received offer with null ID, skipping ({})", offer); return; } String rolesInfo = MesosUtils.getRoles(offer).toString(); LOG.debug("Received offer ID {} with roles {} from {} ({}) for {} cpu(s), {} memory, {} ports, and {} disk", offer.getId().getValue(), rolesInfo, offer.getHostname(), offer.getAgentId().getValue(), MesosUtils.getNumCpus(offer), MesosUtils.getMemory(offer), MesosUtils.getNumPorts(offer), MesosUtils.getDisk(offer)); CheckResult checkResult = slaveAndRackManager.checkOffer(offer); if (checkResult == CheckResult.NOT_ACCEPTING_TASKS) { mesosSchedulerClient.decline(Collections.singletonList(offer.getId())); offersToCheck.remove(offer); LOG.debug("Will decline offer {}, slave {} is not currently in a state to launch tasks", offer.getId().getValue(), offer.getHostname()); } }); final Set<OfferID> acceptedOffers = Sets.newHashSetWithExpectedSize(offersToCheck.size()); try { Collection<SingularityOfferHolder> offerHolders = offerScheduler.checkOffers(offersToCheck); for (SingularityOfferHolder offerHolder : offerHolders) { if (!offerHolder.getAcceptedTasks().isEmpty()) { List<Offer> leftoverOffers = offerHolder.launchTasksAndGetUnusedOffers(mesosSchedulerClient); leftoverOffers.forEach((o) -> { if (cachedOffers.containsKey(o.getId().getValue())) { 
offerCache.returnOffer(cachedOffers.remove(o.getId().getValue())); } else { offerCache.cacheOffer(start, o); } }); List<Offer> offersAcceptedFromSlave = offerHolder.getOffers(); offersAcceptedFromSlave.removeAll(leftoverOffers); offersAcceptedFromSlave.stream() .filter((offer) -> cachedOffers.containsKey(offer.getId().getValue())) .map((o) -> cachedOffers.remove(o.getId().getValue())) .forEach(offerCache::useOffer); acceptedOffers.addAll(offersAcceptedFromSlave.stream().map(Offer::getId).collect(Collectors.toList())); } else { offerHolder.getOffers().forEach((o) -> { if (cachedOffers.containsKey(o.getId().getValue())) { offerCache.returnOffer(cachedOffers.remove(o.getId().getValue())); } else { offerCache.cacheOffer(start, o); } }); } } LOG.info("{} remaining offers not accounted for in offer check", cachedOffers.size()); cachedOffers.values().forEach(offerCache::returnOffer); } catch (Throwable t) { LOG.error("Received fatal error while handling offers - will decline all available offers", t); mesosSchedulerClient.decline(offersToCheck.stream() .filter((o) -> !acceptedOffers.contains(o.getId()) && !cachedOffers.containsKey(o.getId().getValue())) .map(Offer::getId) .collect(Collectors.toList())); offersToCheck.forEach((o) -> { if (cachedOffers.containsKey(o.getId().getValue())) { offerCache.returnOffer(cachedOffers.get(o.getId().getValue())); } }); throw t; } LOG.info("Finished handling {} new offer(s) ({}), {} accepted, {} declined/cached", offers.size(), JavaUtils.duration(start), acceptedOffers.size(), offers.size() - acceptedOffers.size()); }, "resourceOffers"); } @Override public void inverseOffers(List<InverseOffer> offers) { LOG.debug("Singularity is currently not able to handle inverse offers events"); } @Override public void rescind(OfferID offerId) { if (!isRunning()) { LOG.warn("Received rescind when not running for offer {}", offerId.getValue()); } callWithOffersLock(() -> offerCache.rescindOffer(offerId), "rescind"); } @Override public void 
rescindInverseOffer(OfferID offerId) { LOG.debug("Singularity is currently not able to handle inverse offers events"); } @Override public CompletableFuture<Boolean> statusUpdate(TaskStatus status) { if (!isRunning()) { LOG.info("Scheduler is in state {}, queueing an update {} - {} queued updates so far", state.name(), status, queuedUpdates.size()); queuedUpdates.add(status); return CompletableFuture.completedFuture(false); } try { return handleStatusUpdateAsync(status); } catch (Throwable t) { LOG.error("Scheduler threw an uncaught exception", t); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(t)); return CompletableFuture.completedFuture(false); } } @Override public void message(Message message) { ExecutorID executorID = message.getExecutorId(); AgentID slaveId = message.getAgentId(); byte[] data = message.getData().toByteArray(); LOG.info("Framework message from executor {} on slave {} with {} bytes of data", executorID, slaveId, data.length); messageHandler.handleMessage(executorID, slaveId, data); } @Override public void failure(Failure failure) { if (failure.hasExecutorId()) { LOG.warn("Lost an executor {} on slave {} with status {}", failure.getExecutorId(), failure.getAgentId(), failure.getStatus()); } else { slaveLost(failure.getAgentId()); } } @Override public void error(String message) { callWithStateLock(() -> { LOG.error("Aborting due to error: {}", message); notifyStopping(); abort.abort(AbortReason.MESOS_ERROR, Optional.absent()); }, "error", true); } @Override public void heartbeat(Event event) { long now = System.currentTimeMillis(); long delta = (now - lastHeartbeatTime.getAndSet(now)); LOG.debug("Heartbeat from mesos. 
Delta since last heartbeat is {}ms", delta); } @Override public void onUncaughtException(Throwable t) { LOG.error("uncaught exception", t); callWithStateLock(() -> { if (t instanceof PrematureChannelClosureException) { LOG.error("Lost connection to the mesos master, aborting", t); notifyStopping(); abort.abort(AbortReason.LOST_MESOS_CONNECTION, Optional.of(t)); } else { LOG.error("Aborting due to error: {}", t.getMessage(), t); notifyStopping(); abort.abort(AbortReason.MESOS_ERROR, Optional.of(t)); } }, "errorUncaughtException", true); } @Override public void onConnectException(Throwable t) { callWithStateLock(() -> { LOG.error("Unable to connect to mesos master {}", t.getMessage(), t); try { start(); } catch (Throwable startThrowable) { LOG.error("Unable to retry mesos master connection", startThrowable); notifyStopping(); abort.abort(AbortReason.MESOS_ERROR, Optional.of(startThrowable)); } }, "errorConnectException", false); } @Override public long getEventBufferSize() { return configuration.getMesosConfiguration().getRxEventBufferSize(); } public void start() throws Exception { MesosConfiguration mesosConfiguration = configuration.getMesosConfiguration(); // If more than one host is provided choose at random, we will be redirected if the host is not the master List<String> masters = Arrays.asList(mesosConfiguration.getMaster().split(",")); String nextMaster = masters.get(new Random().nextInt(masters.size())); if (!nextMaster.startsWith("http")) { nextMaster = "http://" + nextMaster; } URI masterUri = URI.create(nextMaster); mesosSchedulerClient.subscribe(new URI( masterUri.getScheme() == null ? "http" : masterUri.getScheme(), masterUri.getUserInfo(), masterUri.getHost(), masterUri.getPort(), Strings.isNullOrEmpty(masterUri.getPath()) ? 
"/api/v1/scheduler" : masterUri.getPath(), masterUri.getQuery(), masterUri.getFragment() ), this); } private void callWithOffersLock(Runnable function, String method) { if (!isRunning()) { LOG.info("Ignoring {} because scheduler isn't running ({})", method, state); return; } try { lock.runWithOffersLock(function, String.format("%s#%s", getClass().getSimpleName(), method)); } catch (Throwable t) { LOG.error("Scheduler threw an uncaught exception - exiting", t); exceptionNotifier.notify(String.format("Scheduler threw an uncaught exception (%s)", t.getMessage()), t); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(t)); } } private void callWithStateLock(Runnable function, String name, boolean ignoreIfNotRunning) { if (ignoreIfNotRunning && !isRunning()) { LOG.info("Ignoring {} because scheduler isn't running ({})", name, state); return; } try { lock.runWithStateLock(function, name); } catch (Throwable t) { LOG.error("Scheduler threw an uncaught exception - exiting", t); exceptionNotifier.notify(String.format("Scheduler threw an uncaught exception (%s)", t.getMessage()), t); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(t)); } } public void notifyStopping() { LOG.info("Scheduler is moving to stopped, current state: {}", state); state = SchedulerState.STOPPED; leaderCacheCoordinator.stopLeaderCache(); mesosSchedulerClient.close(); LOG.info("Scheduler now in state: {}", state); } public boolean isRunning() { return state == SchedulerState.SUBSCRIBED; } public void setSubscribed() { callWithStateLock(() -> state = SchedulerState.SUBSCRIBED, "setSubscribed", false); } public Optional<MasterInfo> getMaster() { return Optional.fromNullable(masterInfo.get()); } public void slaveLost(Protos.AgentID slaveId) { LOG.warn("Lost a slave {}", slaveId); slaveAndRackManager.slaveLost(slaveId); } public Optional<Long> getLastOfferTimestamp() { return lastOfferTimestamp; } public Optional<Double> getHeartbeatIntervalSeconds() { 
return heartbeatIntervalSeconds; } public void killAndRecord(SingularityTaskId taskId, Optional<RequestCleanupType> requestCleanupType, Optional<TaskCleanupType> taskCleanupType, Optional<Long> originalTimestamp, Optional<Integer> retries, Optional<String> user) { Preconditions.checkState(isRunning()); Optional<TaskCleanupType> maybeCleanupFromRequestAndTask = getTaskCleanupType(requestCleanupType, taskCleanupType); if (maybeCleanupFromRequestAndTask.isPresent() && (maybeCleanupFromRequestAndTask.get() == TaskCleanupType.USER_REQUESTED_DESTROY || maybeCleanupFromRequestAndTask.get() == TaskCleanupType.REQUEST_DELETING)) { Optional<SingularityTask> task = taskManager.getTask(taskId); if (task.isPresent()) { if (task.get().getTaskRequest().getDeploy().getCustomExecutorCmd().isPresent()) { byte[] messageBytes = transcoder.toBytes(new SingularityTaskDestroyFrameworkMessage(taskId, user)); mesosSchedulerClient.frameworkMessage( MesosProtosUtils.toExecutorId(task.get().getMesosTask().getExecutor().getExecutorId()), MesosProtosUtils.toAgentId(task.get().getMesosTask().getAgentId()), messageBytes ); } else { LOG.warn("Not using custom executor, will not send framework message to destroy task"); } } else { String message = String.format("No task data available to build kill task framework message for task %s", taskId); exceptionNotifier.notify(message); LOG.error(message); } } mesosSchedulerClient.kill(TaskID.newBuilder().setValue(taskId.toString()).build()); taskManager.saveKilledRecord(new SingularityKilledTaskIdRecord(taskId, System.currentTimeMillis(), originalTimestamp.or(System.currentTimeMillis()), requestCleanupType, taskCleanupType, retries.or(-1) + 1)); } private Optional<TaskCleanupType> getTaskCleanupType(Optional<RequestCleanupType> requestCleanupType, Optional<TaskCleanupType> taskCleanupType) { if (taskCleanupType.isPresent()) { return taskCleanupType; } else { if (requestCleanupType.isPresent()) { return requestCleanupType.get().getTaskCleanupType(); } 
return Optional.absent(); } } public SchedulerState getState() { return state; } private CompletableFuture<Boolean> handleStatusUpdateAsync(TaskStatus status) { long start = System.currentTimeMillis(); return statusUpdateHandler.processStatusUpdateAsync(status) .whenCompleteAsync((result, throwable) -> { if (throwable != null) { LOG.error("Scheduler threw an uncaught exception processing status updates", throwable); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(throwable)); } if (status.hasUuid()) { mesosSchedulerClient.acknowledge(status.getAgentId(), status.getTaskId(), status.getUuid()); } LOG.debug("Handled status update for {} in {}", status.getTaskId().getValue(), JavaUtils.duration(start)); }); } }
SingularityService/src/main/java/com/hubspot/singularity/mesos/SingularityMesosSchedulerImpl.java
package com.hubspot.singularity.mesos; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import javax.inject.Singleton; import org.apache.mesos.v1.Protos; import org.apache.mesos.v1.Protos.AgentID; import org.apache.mesos.v1.Protos.ExecutorID; import org.apache.mesos.v1.Protos.InverseOffer; import org.apache.mesos.v1.Protos.MasterInfo; import org.apache.mesos.v1.Protos.Offer; import org.apache.mesos.v1.Protos.OfferID; import org.apache.mesos.v1.Protos.TaskID; import org.apache.mesos.v1.Protos.TaskStatus; import org.apache.mesos.v1.scheduler.Protos.Event; import org.apache.mesos.v1.scheduler.Protos.Event.Failure; import org.apache.mesos.v1.scheduler.Protos.Event.Message; import org.apache.mesos.v1.scheduler.Protos.Event.Subscribed; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.codahale.metrics.annotation.Timed; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.inject.Inject; import com.google.inject.name.Named; import com.hubspot.mesos.JavaUtils; import com.hubspot.singularity.RequestCleanupType; import com.hubspot.singularity.SingularityAbort; import com.hubspot.singularity.SingularityAbort.AbortReason; import com.hubspot.singularity.SingularityAction; import com.hubspot.singularity.SingularityKilledTaskIdRecord; import com.hubspot.singularity.SingularityMainModule; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskDestroyFrameworkMessage; import 
com.hubspot.singularity.SingularityTaskId; import com.hubspot.singularity.TaskCleanupType; import com.hubspot.singularity.config.MesosConfiguration; import com.hubspot.singularity.config.SingularityConfiguration; import com.hubspot.singularity.data.DisasterManager; import com.hubspot.singularity.data.TaskManager; import com.hubspot.singularity.data.transcoders.Transcoder; import com.hubspot.singularity.helpers.MesosProtosUtils; import com.hubspot.singularity.helpers.MesosUtils; import com.hubspot.singularity.mesos.SingularityOfferCache.CachedOffer; import com.hubspot.singularity.mesos.SingularitySlaveAndRackManager.CheckResult; import com.hubspot.singularity.scheduler.SingularityLeaderCacheCoordinator; import com.hubspot.singularity.sentry.SingularityExceptionNotifier; import io.netty.handler.codec.PrematureChannelClosureException; @Singleton public class SingularityMesosSchedulerImpl extends SingularityMesosScheduler { private static final Logger LOG = LoggerFactory.getLogger(SingularityMesosScheduler.class); private final SingularityExceptionNotifier exceptionNotifier; private final SingularityStartup startup; private final SingularityAbort abort; private final SingularityLeaderCacheCoordinator leaderCacheCoordinator; private final SingularityMesosFrameworkMessageHandler messageHandler; private final SingularitySlaveAndRackManager slaveAndRackManager; private final DisasterManager disasterManager; private final OfferCache offerCache; private final SingularityMesosOfferScheduler offerScheduler; private final SingularityMesosStatusUpdateHandler statusUpdateHandler; private final SingularityMesosSchedulerClient mesosSchedulerClient; private final boolean offerCacheEnabled; private final boolean delayWhenStatusUpdateDeltaTooLarge; private final long delayWhenDeltaOverMs; private final AtomicLong statusUpdateDeltaAvg; private final AtomicLong lastHeartbeatTime; private final SingularityConfiguration configuration; private final TaskManager taskManager; private final 
Transcoder<SingularityTaskDestroyFrameworkMessage> transcoder; private final SingularitySchedulerLock lock; private volatile SchedulerState state; private Optional<Long> lastOfferTimestamp = Optional.absent(); private Optional<Double> heartbeatIntervalSeconds = Optional.absent(); private final AtomicReference<MasterInfo> masterInfo = new AtomicReference<>(); private final List<TaskStatus> queuedUpdates; @Inject SingularityMesosSchedulerImpl(SingularitySchedulerLock lock, SingularityExceptionNotifier exceptionNotifier, SingularityStartup startup, SingularityLeaderCacheCoordinator leaderCacheCoordinator, SingularityAbort abort, SingularityMesosFrameworkMessageHandler messageHandler, SingularitySlaveAndRackManager slaveAndRackManager, OfferCache offerCache, SingularityMesosOfferScheduler offerScheduler, SingularityMesosStatusUpdateHandler statusUpdateHandler, SingularityMesosSchedulerClient mesosSchedulerClient, DisasterManager disasterManager, SingularityConfiguration configuration, TaskManager taskManager, Transcoder<SingularityTaskDestroyFrameworkMessage> transcoder, @Named(SingularityMainModule.STATUS_UPDATE_DELTA_30S_AVERAGE) AtomicLong statusUpdateDeltaAvg, @Named(SingularityMainModule.LAST_MESOS_MASTER_HEARTBEAT_TIME) AtomicLong lastHeartbeatTime) { this.exceptionNotifier = exceptionNotifier; this.startup = startup; this.abort = abort; this.messageHandler = messageHandler; this.slaveAndRackManager = slaveAndRackManager; this.disasterManager = disasterManager; this.offerCache = offerCache; this.offerScheduler = offerScheduler; this.statusUpdateHandler = statusUpdateHandler; this.mesosSchedulerClient = mesosSchedulerClient; this.offerCacheEnabled = configuration.isCacheOffers(); this.delayWhenStatusUpdateDeltaTooLarge = configuration.isDelayOfferProcessingForLargeStatusUpdateDelta(); this.delayWhenDeltaOverMs = configuration.getDelayPollersWhenDeltaOverMs(); this.statusUpdateDeltaAvg = statusUpdateDeltaAvg; this.lastHeartbeatTime = lastHeartbeatTime; 
this.taskManager = taskManager; this.transcoder = transcoder; this.leaderCacheCoordinator = leaderCacheCoordinator; this.queuedUpdates = Lists.newArrayList(); this.lock = lock; this.state = SchedulerState.NOT_STARTED; this.configuration = configuration; } @Override public void subscribed(Subscribed subscribed) { callWithStateLock(() -> { Preconditions.checkState(state == SchedulerState.NOT_STARTED, "Asked to startup - but in invalid state: %s", state.name()); double advertisedHeartbeatIntervalSeconds = subscribed.getHeartbeatIntervalSeconds(); if (advertisedHeartbeatIntervalSeconds > 0) { heartbeatIntervalSeconds = Optional.of(advertisedHeartbeatIntervalSeconds); } // Should be called before activation of leader cache or cache could be left empty startup.checkMigrations(); leaderCacheCoordinator.activateLeaderCache(); MasterInfo newMasterInfo = subscribed.getMasterInfo(); masterInfo.set(newMasterInfo); startup.startup(newMasterInfo); state = SchedulerState.SUBSCRIBED; queuedUpdates.forEach(this::handleStatusUpdateAsync); }, "subscribed", false); } @Timed @Override public void resourceOffers(List<Offer> offers) { if (!isRunning()) { LOG.info("Scheduler is in state {}, declining {} offer(s)", state.name(), offers.size()); mesosSchedulerClient.decline(offers.stream().map(Offer::getId).collect(Collectors.toList())); return; } callWithOffersLock(() -> { final long start = System.currentTimeMillis(); lastOfferTimestamp = Optional.of(start); LOG.info("Received {} offer(s)", offers.size()); boolean delclineImmediately = false; if (disasterManager.isDisabled(SingularityAction.PROCESS_OFFERS)) { LOG.info("Processing offers is currently disabled, declining {} offers", offers.size()); delclineImmediately = true; } if (delayWhenStatusUpdateDeltaTooLarge && statusUpdateDeltaAvg.get() > delayWhenDeltaOverMs) { LOG.info("Status update delta is too large ({}), declining offers while status updates catch up", statusUpdateDeltaAvg.get()); delclineImmediately = true; } if 
(delclineImmediately) { mesosSchedulerClient.decline(offers.stream().map(Offer::getId).collect(Collectors.toList())); return; } if (offerCacheEnabled) { if (disasterManager.isDisabled(SingularityAction.CACHE_OFFERS)) { offerCache.disableOfferCache(); } else { offerCache.enableOfferCache(); } } List<Offer> offersToCheck = new ArrayList<>(offers); List<CachedOffer> cachedOfferList = offerCache.checkoutOffers(); Map<String, CachedOffer> cachedOffers = new HashMap<>(); for (CachedOffer cachedOffer : cachedOfferList) { cachedOffers.put(cachedOffer.getOfferId(), cachedOffer); offersToCheck.add(cachedOffer.getOffer()); } offers.parallelStream().forEach((offer) -> { String rolesInfo = MesosUtils.getRoles(offer).toString(); LOG.debug("Received offer ID {} with roles {} from {} ({}) for {} cpu(s), {} memory, {} ports, and {} disk", offer.getId().getValue(), rolesInfo, offer.getHostname(), offer.getAgentId().getValue(), MesosUtils.getNumCpus(offer), MesosUtils.getMemory(offer), MesosUtils.getNumPorts(offer), MesosUtils.getDisk(offer)); CheckResult checkResult = slaveAndRackManager.checkOffer(offer); if (checkResult == CheckResult.NOT_ACCEPTING_TASKS) { mesosSchedulerClient.decline(Collections.singletonList(offer.getId())); offersToCheck.remove(offer); LOG.debug("Will decline offer {}, slave {} is not currently in a state to launch tasks", offer.getId().getValue(), offer.getHostname()); } }); final Set<OfferID> acceptedOffers = Sets.newHashSetWithExpectedSize(offersToCheck.size()); try { Collection<SingularityOfferHolder> offerHolders = offerScheduler.checkOffers(offersToCheck); for (SingularityOfferHolder offerHolder : offerHolders) { if (!offerHolder.getAcceptedTasks().isEmpty()) { List<Offer> leftoverOffers = offerHolder.launchTasksAndGetUnusedOffers(mesosSchedulerClient); leftoverOffers.forEach((o) -> { if (cachedOffers.containsKey(o.getId().getValue())) { offerCache.returnOffer(cachedOffers.remove(o.getId().getValue())); } else { offerCache.cacheOffer(start, o); } }); 
List<Offer> offersAcceptedFromSlave = offerHolder.getOffers(); offersAcceptedFromSlave.removeAll(leftoverOffers); offersAcceptedFromSlave.stream() .filter((offer) -> cachedOffers.containsKey(offer.getId().getValue())) .map((o) -> cachedOffers.remove(o.getId().getValue())) .forEach(offerCache::useOffer); acceptedOffers.addAll(offersAcceptedFromSlave.stream().map(Offer::getId).collect(Collectors.toList())); } else { offerHolder.getOffers().forEach((o) -> { if (cachedOffers.containsKey(o.getId().getValue())) { offerCache.returnOffer(cachedOffers.remove(o.getId().getValue())); } else { offerCache.cacheOffer(start, o); } }); } } LOG.info("{} remaining offers not accounted for in offer check", cachedOffers.size()); cachedOffers.values().forEach(offerCache::returnOffer); } catch (Throwable t) { LOG.error("Received fatal error while handling offers - will decline all available offers", t); mesosSchedulerClient.decline(offersToCheck.stream() .filter((o) -> !acceptedOffers.contains(o.getId()) && !cachedOffers.containsKey(o.getId().getValue())) .map(Offer::getId) .collect(Collectors.toList())); offersToCheck.forEach((o) -> { if (cachedOffers.containsKey(o.getId().getValue())) { offerCache.returnOffer(cachedOffers.get(o.getId().getValue())); } }); throw t; } LOG.info("Finished handling {} new offer(s) ({}), {} accepted, {} declined/cached", offers.size(), JavaUtils.duration(start), acceptedOffers.size(), offers.size() - acceptedOffers.size()); }, "resourceOffers"); } @Override public void inverseOffers(List<InverseOffer> offers) { LOG.debug("Singularity is currently not able to handle inverse offers events"); } @Override public void rescind(OfferID offerId) { if (!isRunning()) { LOG.warn("Received rescind when not running for offer {}", offerId.getValue()); } callWithOffersLock(() -> offerCache.rescindOffer(offerId), "rescind"); } @Override public void rescindInverseOffer(OfferID offerId) { LOG.debug("Singularity is currently not able to handle inverse offers events"); } 
@Override public CompletableFuture<Boolean> statusUpdate(TaskStatus status) { if (!isRunning()) { LOG.info("Scheduler is in state {}, queueing an update {} - {} queued updates so far", state.name(), status, queuedUpdates.size()); queuedUpdates.add(status); return CompletableFuture.completedFuture(false); } try { return handleStatusUpdateAsync(status); } catch (Throwable t) { LOG.error("Scheduler threw an uncaught exception", t); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(t)); return CompletableFuture.completedFuture(false); } } @Override public void message(Message message) { ExecutorID executorID = message.getExecutorId(); AgentID slaveId = message.getAgentId(); byte[] data = message.getData().toByteArray(); LOG.info("Framework message from executor {} on slave {} with {} bytes of data", executorID, slaveId, data.length); messageHandler.handleMessage(executorID, slaveId, data); } @Override public void failure(Failure failure) { if (failure.hasExecutorId()) { LOG.warn("Lost an executor {} on slave {} with status {}", failure.getExecutorId(), failure.getAgentId(), failure.getStatus()); } else { slaveLost(failure.getAgentId()); } } @Override public void error(String message) { callWithStateLock(() -> { LOG.error("Aborting due to error: {}", message); notifyStopping(); abort.abort(AbortReason.MESOS_ERROR, Optional.absent()); }, "error", true); } @Override public void heartbeat(Event event) { long now = System.currentTimeMillis(); long delta = (now - lastHeartbeatTime.getAndSet(now)); LOG.debug("Heartbeat from mesos. 
Delta since last heartbeat is {}ms", delta); } @Override public void onUncaughtException(Throwable t) { LOG.error("uncaught exception", t); callWithStateLock(() -> { if (t instanceof PrematureChannelClosureException) { LOG.error("Lost connection to the mesos master, aborting", t); notifyStopping(); abort.abort(AbortReason.LOST_MESOS_CONNECTION, Optional.of(t)); } else { LOG.error("Aborting due to error: {}", t.getMessage(), t); notifyStopping(); abort.abort(AbortReason.MESOS_ERROR, Optional.of(t)); } }, "errorUncaughtException", true); } @Override public void onConnectException(Throwable t) { callWithStateLock(() -> { LOG.error("Unable to connect to mesos master {}", t.getMessage(), t); try { start(); } catch (Throwable startThrowable) { LOG.error("Unable to retry mesos master connection", startThrowable); notifyStopping(); abort.abort(AbortReason.MESOS_ERROR, Optional.of(startThrowable)); } }, "errorConnectException", false); } @Override public long getEventBufferSize() { return configuration.getMesosConfiguration().getRxEventBufferSize(); } public void start() throws Exception { MesosConfiguration mesosConfiguration = configuration.getMesosConfiguration(); // If more than one host is provided choose at random, we will be redirected if the host is not the master List<String> masters = Arrays.asList(mesosConfiguration.getMaster().split(",")); String nextMaster = masters.get(new Random().nextInt(masters.size())); if (!nextMaster.startsWith("http")) { nextMaster = "http://" + nextMaster; } URI masterUri = URI.create(nextMaster); mesosSchedulerClient.subscribe(new URI( masterUri.getScheme() == null ? "http" : masterUri.getScheme(), masterUri.getUserInfo(), masterUri.getHost(), masterUri.getPort(), Strings.isNullOrEmpty(masterUri.getPath()) ? 
"/api/v1/scheduler" : masterUri.getPath(), masterUri.getQuery(), masterUri.getFragment() ), this); } private void callWithOffersLock(Runnable function, String method) { if (!isRunning()) { LOG.info("Ignoring {} because scheduler isn't running ({})", method, state); return; } try { lock.runWithOffersLock(function, String.format("%s#%s", getClass().getSimpleName(), method)); } catch (Throwable t) { LOG.error("Scheduler threw an uncaught exception - exiting", t); exceptionNotifier.notify(String.format("Scheduler threw an uncaught exception (%s)", t.getMessage()), t); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(t)); } } private void callWithStateLock(Runnable function, String name, boolean ignoreIfNotRunning) { if (ignoreIfNotRunning && !isRunning()) { LOG.info("Ignoring {} because scheduler isn't running ({})", name, state); return; } try { lock.runWithStateLock(function, name); } catch (Throwable t) { LOG.error("Scheduler threw an uncaught exception - exiting", t); exceptionNotifier.notify(String.format("Scheduler threw an uncaught exception (%s)", t.getMessage()), t); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(t)); } } public void notifyStopping() { LOG.info("Scheduler is moving to stopped, current state: {}", state); state = SchedulerState.STOPPED; leaderCacheCoordinator.stopLeaderCache(); mesosSchedulerClient.close(); LOG.info("Scheduler now in state: {}", state); } public boolean isRunning() { return state == SchedulerState.SUBSCRIBED; } public void setSubscribed() { callWithStateLock(() -> state = SchedulerState.SUBSCRIBED, "setSubscribed", false); } public Optional<MasterInfo> getMaster() { return Optional.fromNullable(masterInfo.get()); } public void slaveLost(Protos.AgentID slaveId) { LOG.warn("Lost a slave {}", slaveId); slaveAndRackManager.slaveLost(slaveId); } public Optional<Long> getLastOfferTimestamp() { return lastOfferTimestamp; } public Optional<Double> getHeartbeatIntervalSeconds() { 
return heartbeatIntervalSeconds; } public void killAndRecord(SingularityTaskId taskId, Optional<RequestCleanupType> requestCleanupType, Optional<TaskCleanupType> taskCleanupType, Optional<Long> originalTimestamp, Optional<Integer> retries, Optional<String> user) { Preconditions.checkState(isRunning()); Optional<TaskCleanupType> maybeCleanupFromRequestAndTask = getTaskCleanupType(requestCleanupType, taskCleanupType); if (maybeCleanupFromRequestAndTask.isPresent() && (maybeCleanupFromRequestAndTask.get() == TaskCleanupType.USER_REQUESTED_DESTROY || maybeCleanupFromRequestAndTask.get() == TaskCleanupType.REQUEST_DELETING)) { Optional<SingularityTask> task = taskManager.getTask(taskId); if (task.isPresent()) { if (task.get().getTaskRequest().getDeploy().getCustomExecutorCmd().isPresent()) { byte[] messageBytes = transcoder.toBytes(new SingularityTaskDestroyFrameworkMessage(taskId, user)); mesosSchedulerClient.frameworkMessage( MesosProtosUtils.toExecutorId(task.get().getMesosTask().getExecutor().getExecutorId()), MesosProtosUtils.toAgentId(task.get().getMesosTask().getAgentId()), messageBytes ); } else { LOG.warn("Not using custom executor, will not send framework message to destroy task"); } } else { String message = String.format("No task data available to build kill task framework message for task %s", taskId); exceptionNotifier.notify(message); LOG.error(message); } } mesosSchedulerClient.kill(TaskID.newBuilder().setValue(taskId.toString()).build()); taskManager.saveKilledRecord(new SingularityKilledTaskIdRecord(taskId, System.currentTimeMillis(), originalTimestamp.or(System.currentTimeMillis()), requestCleanupType, taskCleanupType, retries.or(-1) + 1)); } private Optional<TaskCleanupType> getTaskCleanupType(Optional<RequestCleanupType> requestCleanupType, Optional<TaskCleanupType> taskCleanupType) { if (taskCleanupType.isPresent()) { return taskCleanupType; } else { if (requestCleanupType.isPresent()) { return requestCleanupType.get().getTaskCleanupType(); } 
return Optional.absent(); } } public SchedulerState getState() { return state; } private CompletableFuture<Boolean> handleStatusUpdateAsync(TaskStatus status) { long start = System.currentTimeMillis(); return statusUpdateHandler.processStatusUpdateAsync(status) .whenCompleteAsync((result, throwable) -> { if (throwable != null) { LOG.error("Scheduler threw an uncaught exception processing status updates", throwable); notifyStopping(); abort.abort(AbortReason.UNRECOVERABLE_ERROR, Optional.of(throwable)); } if (status.hasUuid()) { mesosSchedulerClient.acknowledge(status.getAgentId(), status.getTaskId(), status.getUuid()); } LOG.debug("Handled status update for {} in {}", status.getTaskId().getValue(), JavaUtils.duration(start)); }); } }
Skip offers with null id
SingularityService/src/main/java/com/hubspot/singularity/mesos/SingularityMesosSchedulerImpl.java
Skip offers with null id
Java
apache-2.0
3421d398841c87d2e5f09d57a4d7f6bba1d4c0de
0
nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch
public class Verifier { public Verifier (boolean debug) { _debug = debug; } private boolean _debug; }
AdventOfCode/2020/day5/Verifier.java
Update Verifier.java
AdventOfCode/2020/day5/Verifier.java
Update Verifier.java
Java
apache-2.0
0d9bcf36d26aeadbc6e292558eaecb298d121531
0
Cloud2nd/LBaaS,Cloud2nd/LBaaS
package com.exactsix.mibaas.lecture.service; import java.util.ArrayList; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.exactsix.mibaas.common.response.RestResponse; import com.exactsix.mibaas.lecture.dto.LectureDto; import com.exactsix.mibaas.lecture.repository.LectureRepository; import com.exactsix.mibaas.lecture.repository.dto.LectureRepositoryDto; import com.exactsix.mibaas.lecture.service.search.LectureElasticSearchService; import com.exactsix.mibaas.lecture.util.LectureUtil; ; /** * <pre> * Class Name : LectureService.java * Description : * Modification Information * * 수정일       수정자    수정내용 * ──────────── ───────── ─────────────────────────────── * 2013. 4. 23. dave 최초생성 * </pre> * * @author dave * @since 2014. 9. 16. * @version 1.0 * * Copyright (C) 2012 by BEANY All right reserved. */ @Component public class LectureService { private LectureRepository lectureRepository; @Autowired private LectureElasticSearchService search; public LectureService() { super(); // TODO Auto-generated constructor stub } @Autowired public LectureService(LectureRepository lectureRepository) { super(); this.lectureRepository = lectureRepository; } /** * <pre> * 강좌 등록 * </pre> * * @param lectureDto * @return * @throws Exception */ public RestResponse createLecture(LectureDto lectureDto) { String lectureCode = LectureUtil.getUUID(); // make lecture repository data LectureRepositoryDto repositoryDto = new LectureRepositoryDto(); repositoryDto.setKey(LectureUtil.getLectureKey(lectureCode)); repositoryDto.setLectureCode(lectureCode); repositoryDto.setLectureName(lectureDto.getLectureName()); repositoryDto.setLectureType(lectureDto.getLectureType()); repositoryDto.setLectureLanguage(lectureDto.getLectureLanguage()); if (lectureDto.getLectureThumbnail() != null && lectureDto.getLectureThumbnail() != "") { repositoryDto.setLectureThumbnail(lectureDto.getLectureThumbnail()); } // save db 
repositoryDto = lectureRepository.save(repositoryDto); // make response message RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("강좌가 정상적으로 등록되었습니다"); return response; } public RestResponse getLecture(String lecturecode) { // Get DB RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("ok"); LectureRepositoryDto repositoryDto = lectureRepository .findOne("lecture::" + lecturecode); // setting lecture dto LectureDto lectureDto = new LectureDto(); lectureDto.setLectureName(repositoryDto.getLectureName()); lectureDto.setLectureCode(repositoryDto.getLectureCode()); lectureDto.setLectureLanguage(repositoryDto.getLectureLanguage()); lectureDto.setLectureType(repositoryDto.getLectureType()); lectureDto.setLectureThumbnail(repositoryDto.getLectureThumbnail()); response.setData(lectureDto); // return return response; } /** * <pre> * 강좌 리스트를 불러오는 서비스 입니다. * </pre> * * @return * @throws Exception */ public RestResponse getLectureList() { List<String> keys = search.test(); String[] tests = keys.toArray(new String[keys.size()]); // Get DB RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("ok"); List<LectureDto> lectureList = new ArrayList<LectureDto>(); for (String test : tests) { LectureRepositoryDto repositoryDto = lectureRepository .findOne(test); // setting lecture dto LectureDto lectureDto = new LectureDto(); lectureDto.setLectureName(repositoryDto.getLectureName()); lectureDto.setLectureCode(repositoryDto.getLectureCode()); lectureDto.setLectureLanguage(repositoryDto.getLectureLanguage()); lectureDto.setLectureType(repositoryDto.getLectureType()); lectureDto.setLectureThumbnail(repositoryDto.getLectureThumbnail()); // Need Approve List<String> needApproveKeys = search .getNotApproveUser(repositoryDto.getLectureCode()); lectureDto.setNeedApprove(needApproveKeys.size()); lectureList.add(lectureDto); } response.setData(lectureList); // return return response; 
} public RestResponse getProgressCourseList() { List<String> keys = search.getProgressCourse(); List<String> lectureKeys = new ArrayList<String>(); for (String key : keys) { String[] tmp = key.split("::"); lectureKeys.add(LectureUtil.getLectureKey(tmp[1])); } String[] tests = lectureKeys.toArray(new String[lectureKeys.size()]); // Get DB RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("ok"); List<LectureDto> lectureList = new ArrayList<LectureDto>(); for (String test : tests) { LectureRepositoryDto repositoryDto = lectureRepository .findOne(test); // setting lecture dto LectureDto lectureDto = new LectureDto(); lectureDto.setLectureName(repositoryDto.getLectureName()); lectureDto.setLectureCode(repositoryDto.getLectureCode()); lectureDto.setLectureLanguage(repositoryDto.getLectureLanguage()); lectureDto.setLectureType(repositoryDto.getLectureType()); lectureList.add(lectureDto); } response.setData(lectureList); // return return response; } // }
baas-server/baas-server/src/main/java/com/exactsix/mibaas/lecture/service/LectureService.java
package com.exactsix.mibaas.lecture.service; import java.util.ArrayList; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.exactsix.mibaas.common.response.RestResponse; import com.exactsix.mibaas.lecture.dto.LectureDto; import com.exactsix.mibaas.lecture.repository.LectureRepository; import com.exactsix.mibaas.lecture.repository.dto.LectureRepositoryDto; import com.exactsix.mibaas.lecture.service.search.LectureElasticSearchService; import com.exactsix.mibaas.lecture.util.LectureUtil; ; /** * <pre> * Class Name : LectureService.java * Description : * Modification Information * * 수정일       수정자    수정내용 * ──────────── ───────── ─────────────────────────────── * 2013. 4. 23. dave 최초생성 * </pre> * * @author dave * @since 2014. 9. 16. * @version 1.0 * * Copyright (C) 2012 by BEANY All right reserved. */ @Component public class LectureService { private LectureRepository lectureRepository; @Autowired private LectureElasticSearchService search; public LectureService() { super(); // TODO Auto-generated constructor stub } @Autowired public LectureService(LectureRepository lectureRepository) { super(); this.lectureRepository = lectureRepository; } /** * <pre> * 강좌 등록 * </pre> * * @param lectureDto * @return * @throws Exception */ public RestResponse createLecture(LectureDto lectureDto) { String lectureCode = LectureUtil.getUUID(); // make lecture repository data LectureRepositoryDto repositoryDto = new LectureRepositoryDto(); repositoryDto.setKey(LectureUtil.getLectureKey(lectureCode)); repositoryDto.setLectureCode(lectureCode); repositoryDto.setLectureName(lectureDto.getLectureName()); repositoryDto.setLectureType(lectureDto.getLectureType()); repositoryDto.setLectureLanguage(lectureDto.getLectureLanguage()); if (lectureDto.getLectureThumbnail() != null && lectureDto.getLectureThumbnail() != "") { repositoryDto.setLectureThumbnail(lectureDto.getLectureThumbnail()); } // save db 
repositoryDto = lectureRepository.save(repositoryDto); // make response message RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("강좌가 정상적으로 등록되었습니다"); return response; } public RestResponse getLecture(String lecturecode) { // Get DB RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("ok"); LectureRepositoryDto repositoryDto = lectureRepository .findOne("lecture::" + lecturecode); // setting lecture dto LectureDto lectureDto = new LectureDto(); lectureDto.setLectureName(repositoryDto.getLectureName()); lectureDto.setLectureCode(repositoryDto.getLectureCode()); lectureDto.setLectureLanguage(repositoryDto.getLectureLanguage()); lectureDto.setLectureType(repositoryDto.getLectureType()); lectureDto.setLectureThumbnail(repositoryDto.getLectureThumbnail()); response.setData(lectureDto); // return return response; } /** * <pre> * 강좌 리스트를 불러오는 서비스 입니다. * </pre> * * @return * @throws Exception */ public RestResponse getLectureList() { List<String> keys = search.test(); String[] tests = keys.toArray(new String[keys.size()]); // Get DB RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("ok"); List<LectureDto> lectureList = new ArrayList<LectureDto>(); for (String test : tests) { LectureRepositoryDto repositoryDto = lectureRepository .findOne(test); // setting lecture dto LectureDto lectureDto = new LectureDto(); lectureDto.setLectureName(repositoryDto.getLectureName()); lectureDto.setLectureCode(repositoryDto.getLectureCode()); lectureDto.setLectureLanguage(repositoryDto.getLectureLanguage()); lectureDto.setLectureType(repositoryDto.getLectureType()); // Need Approve List<String> needApproveKeys = search .getNotApproveUser(repositoryDto.getLectureCode()); lectureDto.setNeedApprove(needApproveKeys.size()); System.out.println(needApproveKeys.size()); lectureList.add(lectureDto); } response.setData(lectureList); // return return response; } public RestResponse 
getProgressCourseList() { List<String> keys = search.getProgressCourse(); List<String> lectureKeys = new ArrayList<String>(); for (String key : keys) { String[] tmp = key.split("::"); lectureKeys.add(LectureUtil.getLectureKey(tmp[1])); } String[] tests = lectureKeys.toArray(new String[lectureKeys.size()]); // Get DB RestResponse response = new RestResponse(); response.setStatus(true); response.setMessage("ok"); List<LectureDto> lectureList = new ArrayList<LectureDto>(); for (String test : tests) { LectureRepositoryDto repositoryDto = lectureRepository .findOne(test); // setting lecture dto LectureDto lectureDto = new LectureDto(); lectureDto.setLectureName(repositoryDto.getLectureName()); lectureDto.setLectureCode(repositoryDto.getLectureCode()); lectureDto.setLectureLanguage(repositoryDto.getLectureLanguage()); lectureDto.setLectureType(repositoryDto.getLectureType()); lectureList.add(lectureDto); } response.setData(lectureList); // return return response; } // }
Lecture Thubnail List
baas-server/baas-server/src/main/java/com/exactsix/mibaas/lecture/service/LectureService.java
Lecture Thubnail List
Java
apache-2.0
65189b8aaf96236d139db2d180de2475da3eceb4
0
lucastheisen/apache-directory-server,lucastheisen/apache-directory-server,drankye/directory-server,darranl/directory-server,drankye/directory-server,apache/directory-server,darranl/directory-server,apache/directory-server
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.directory.server.core.changelog; import junit.framework.TestCase; import org.apache.commons.io.FileUtils; import org.apache.directory.server.core.DefaultDirectoryService; import org.apache.directory.server.core.DirectoryService; import org.apache.directory.server.core.authn.LdapPrincipal; import org.apache.directory.shared.ldap.constants.AuthenticationLevel; import org.apache.directory.shared.ldap.exception.LdapNameNotFoundException; import org.apache.directory.shared.ldap.message.AttributeImpl; import org.apache.directory.shared.ldap.message.AttributesImpl; import org.apache.directory.shared.ldap.message.ModificationItemImpl; import org.apache.directory.shared.ldap.name.LdapDN; import org.apache.directory.shared.ldap.util.StringTools; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.naming.NamingException; import javax.naming.directory.Attribute; import javax.naming.directory.Attributes; import javax.naming.directory.DirContext; import javax.naming.ldap.LdapContext; import java.io.File; import java.io.IOException; import java.util.Arrays; /** * Used to test the default change log implementation with an in memory * change log store. 
Note that this will probably be removed since this * functionality will be used and tested anyway in all other test cases. * * @author <a href="mailto:[email protected]">Apache Directory Project</a> * @version $Rev$, $Date$ */ public class DefaultChangeLogITest extends TestCase { public static final Logger LOG = LoggerFactory.getLogger( DefaultChangeLogITest.class ); /** the context root for the system partition */ protected LdapContext sysRoot; protected DirectoryService service; public DefaultChangeLogITest() { this.service = new DefaultDirectoryService(); } /** * Get's the initial context factory for the provider's ou=system context * root. * * @see junit.framework.TestCase#setUp() */ protected void setUp() throws Exception { super.setUp(); service.setShutdownHookEnabled( false ); service.getChangeLog().setEnabled( true ); doDelete( service.getWorkingDirectory() ); service.startup(); sysRoot = service.getJndiContext( new LdapPrincipal( new LdapDN(), AuthenticationLevel.SIMPLE ), "ou=system" ); } /** * Deletes the working directory. * * @param wkdir the working directory to delete * @throws IOException if the working directory cannot be deleted */ protected void doDelete( File wkdir ) throws IOException { if ( wkdir.exists() ) { try { FileUtils.deleteDirectory( wkdir ); } catch ( IOException e ) { LOG.error( "Failed to delete the working directory.", e ); } } if ( wkdir.exists() ) { throw new IOException( "Failed to delete: " + wkdir ); } } /** * Issues a shutdown request to the server. */ protected void shutdown() { try { service.shutdown(); } catch ( Exception e ) { LOG.error( "Encountered an error while shutting down directory service.", e ); } sysRoot = null; Runtime.getRuntime().gc(); } /** * Issues a sync request to the server. */ protected void sync() { try { service.sync(); } catch ( Exception e ) { LOG.warn( "Encountered error while syncing.", e ); } } /** * Sets the system context root to null. 
* * @see junit.framework.TestCase#tearDown() */ protected void tearDown() throws Exception { super.tearDown(); shutdown(); service = new DefaultDirectoryService(); doDelete( service.getWorkingDirectory() ); } public void testTagPersistenceAcrossRestarts() throws NamingException, InterruptedException { assertEquals( 0, service.getChangeLog().getCurrentRevision() ); assertNull( service.getChangeLog().getLatest() ); Tag t0 = service.getChangeLog().tag(); assertEquals( t0, service.getChangeLog().getLatest() ); assertEquals( 0, service.getChangeLog().getCurrentRevision() ); // add new test entry AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); assertEquals( 1, service.getChangeLog().getCurrentRevision() ); service.sync(); service.shutdown(); service.startup(); assertEquals( 1, service.getChangeLog().getCurrentRevision() ); assertEquals( t0, service.getChangeLog().getLatest() ); service.revert(); assertNotPresent( sysRoot, "ou=test" ); assertEquals( 2, service.getChangeLog().getCurrentRevision() ); assertEquals( t0, service.getChangeLog().getLatest() ); } public void testRevertAddOperations() throws NamingException { Tag t0 = service.getChangeLog().tag(); AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); assertNotNull( sysRoot.getAttributes( "ou=test" ) ); service.revert( t0.getRevision() ); try { sysRoot.getAttributes( "ou=test" ); fail( "Should not be able to find the entry!" 
); } catch ( NamingException ne ) { assertTrue( ne instanceof LdapNameNotFoundException ); } } public void testRevertAddAndDeleteOperations() throws NamingException { Tag t0 = service.getChangeLog().tag(); // add new test entry AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); // assert presence assertNotNull( sysRoot.getAttributes( "ou=test" ) ); // delete the test entry and test that it is gone sysRoot.destroySubcontext( "ou=test" ); assertNotPresent( sysRoot, "ou=test" ); // now revert back to begining the added entry is still gone service.revert( t0.getRevision() ); assertNotPresent( sysRoot, "ou=test" ); } public void testRevertDeleteOperations() throws NamingException { AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); // tag after the addition before deletion Tag t0 = service.getChangeLog().tag(); assertNotNull( sysRoot.getAttributes( "ou=test" ) ); // delete the test entry and test that it is gone sysRoot.destroySubcontext( "ou=test" ); assertNotPresent( sysRoot, "ou=test" ); // now revert and assert that the added entry re-appears service.revert( t0.getRevision() ); assertNotNull( sysRoot.getAttributes( "ou=test" ) ); } public void testRevertRenameOperations() throws NamingException { AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "oldname" ); sysRoot.createSubcontext( "ou=oldname", attrs ); // tag after the addition before rename Tag t0 = service.getChangeLog().tag(); assertNotNull( sysRoot.getAttributes( "ou=oldname" ) ); // rename the test entry and test that the rename occurred sysRoot.rename( "ou=oldname", "ou=newname" ); assertNotPresent( sysRoot, "ou=oldname" ); assertNotNull( sysRoot.getAttributes( "ou=newname" ) ); // now revert and assert that the rename was reversed 
service.revert( t0.getRevision() ); assertNotPresent( sysRoot, "ou=newname" ); assertNotNull( sysRoot.getAttributes( "ou=oldname" ) ); } public void testRevertModifyOperations() throws NamingException { AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); // ------------------------------------------------------------------- // Modify ADD Test // ------------------------------------------------------------------- // tag after the addition before modify ADD Tag t0 = service.getChangeLog().tag(); assertNotNull( sysRoot.getAttributes( "ou=test" ) ); // modify the test entry to add description and test new attr appears sysRoot.modifyAttributes( "ou=test", DirContext.ADD_ATTRIBUTE, new AttributesImpl( "description", "a desc value", true ) ); Attributes resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); Attribute description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( "a desc value", description.get() ); // now revert and assert that the added entry re-appears service.revert( t0.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); assertNull( resusitated.get( "description" ) ); // ------------------------------------------------------------------- // Modify REPLACE Test // ------------------------------------------------------------------- // add the attribute again and make sure it is old value sysRoot.modifyAttributes( "ou=test", DirContext.ADD_ATTRIBUTE, new AttributesImpl( "description", "old value", true ) ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "old value" ); // now tag then replace the value to "new value" and confirm Tag t1 = service.getChangeLog().tag(); sysRoot.modifyAttributes( "ou=test", 
DirContext.REPLACE_ATTRIBUTE, new AttributesImpl( "description", "new value", true ) ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "new value" ); // now revert and assert the old value is now reverted service.revert( t1.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "old value" ); // ------------------------------------------------------------------- // Modify REMOVE Test // ------------------------------------------------------------------- Tag t2 = service.getChangeLog().tag(); sysRoot.modifyAttributes( "ou=test", DirContext.REMOVE_ATTRIBUTE, new AttributesImpl( "description", "old value", true ) ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNull( description ); // now revert and assert the old value is now reverted service.revert( t2.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "old value" ); // ------------------------------------------------------------------- // Modify Multi Operation Test // ------------------------------------------------------------------- // add a userPassword attribute so we can test replacing it sysRoot.modifyAttributes( "ou=test", DirContext.ADD_ATTRIBUTE, new AttributesImpl( "userPassword", "to be replaced", true ) ); assertPassword( sysRoot.getAttributes( "ou=test" ), "to be replaced" ); ModificationItemImpl[] mods = new ModificationItemImpl[] { new ModificationItemImpl( DirContext.REMOVE_ATTRIBUTE, new AttributeImpl( "description", "old value" ) ), new ModificationItemImpl( 
DirContext.ADD_ATTRIBUTE, new AttributeImpl( "seeAlso", "ou=added" ) ), new ModificationItemImpl( DirContext.REPLACE_ATTRIBUTE, new AttributeImpl( "userPassword", "a replaced value" ) ) }; Tag t3 = service.getChangeLog().tag(); // now make the modification and check that description is gone, // seeAlso is added, and that the userPassword has been replaced sysRoot.modifyAttributes( "ou=test", mods ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNull( description ); assertPassword( resusitated, "a replaced value" ); Attribute seeAlso = resusitated.get( "seeAlso" ); assertNotNull( seeAlso ); assertEquals( seeAlso.get(), "ou=added" ); // now we revert and make sure the old values are as they were service.revert( t3.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "old value" ); assertPassword( resusitated, "to be replaced" ); seeAlso = resusitated.get( "seeAlso" ); assertNull( seeAlso ); } private void assertPassword( Attributes entry, String password ) throws NamingException { Attribute userPassword = entry.get( "userPassword" ); assertNotNull( userPassword ); Arrays.equals( password.getBytes(), ( byte[] ) userPassword.get() ); } private void assertNotPresent( DirContext ctx, String dn ) throws NamingException { try { ctx.getAttributes( dn ); fail( "Should not be able to find the entry " + dn + " but it is still there." ); } catch ( NamingException ne ) { assertTrue( ne instanceof LdapNameNotFoundException ); } } }
core-unit/src/test/java/org/apache/directory/server/core/changelog/DefaultChangeLogITest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.directory.server.core.changelog; import junit.framework.TestCase; import org.apache.commons.io.FileUtils; import org.apache.directory.server.core.DefaultDirectoryService; import org.apache.directory.server.core.DirectoryService; import org.apache.directory.server.core.authn.LdapPrincipal; import org.apache.directory.shared.ldap.constants.AuthenticationLevel; import org.apache.directory.shared.ldap.exception.LdapNameNotFoundException; import org.apache.directory.shared.ldap.message.AttributeImpl; import org.apache.directory.shared.ldap.message.AttributesImpl; import org.apache.directory.shared.ldap.message.ModificationItemImpl; import org.apache.directory.shared.ldap.name.LdapDN; import org.apache.directory.shared.ldap.util.StringTools; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.naming.NamingException; import javax.naming.directory.Attribute; import javax.naming.directory.Attributes; import javax.naming.directory.DirContext; import javax.naming.ldap.LdapContext; import java.io.File; import java.io.IOException; import java.util.Arrays; /** * Used to test the default change log implementation with an in memory * change log store. 
Note that this will probably be removed since this * functionality will be used and tested anyway in all other test cases. * * @author <a href="mailto:[email protected]">Apache Directory Project</a> * @version $Rev$, $Date$ */ public class DefaultChangeLogITest extends TestCase { public static final Logger LOG = LoggerFactory.getLogger( DefaultChangeLogITest.class ); /** the context root for the system partition */ protected LdapContext sysRoot; protected DirectoryService service; public DefaultChangeLogITest() { this.service = new DefaultDirectoryService(); } /** * Get's the initial context factory for the provider's ou=system context * root. * * @see junit.framework.TestCase#setUp() */ protected void setUp() throws Exception { super.setUp(); service.setShutdownHookEnabled( false ); service.getChangeLog().setEnabled( true ); doDelete( service.getWorkingDirectory() ); service.startup(); sysRoot = service.getJndiContext( new LdapPrincipal( new LdapDN(), AuthenticationLevel.SIMPLE ), "ou=system" ); } /** * Deletes the working directory. * * @param wkdir the working directory to delete * @throws IOException if the working directory cannot be deleted */ protected void doDelete( File wkdir ) throws IOException { if ( wkdir.exists() ) { try { FileUtils.deleteDirectory( wkdir ); } catch ( IOException e ) { LOG.error( "Failed to delete the working directory.", e ); } } if ( wkdir.exists() ) { throw new IOException( "Failed to delete: " + wkdir ); } } /** * Issues a shutdown request to the server. */ protected void shutdown() { try { service.shutdown(); } catch ( Exception e ) { LOG.error( "Encountered an error while shutting down directory service.", e ); } sysRoot = null; Runtime.getRuntime().gc(); } /** * Issues a sync request to the server. */ protected void sync() { try { service.sync(); } catch ( Exception e ) { LOG.warn( "Encountered error while syncing.", e ); } } /** * Sets the system context root to null. 
* * @see junit.framework.TestCase#tearDown() */ protected void tearDown() throws Exception { super.tearDown(); shutdown(); service = new DefaultDirectoryService(); doDelete( service.getWorkingDirectory() ); } public void testRevertAddOperations() throws NamingException { Tag t0 = service.getChangeLog().tag(); AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); assertNotNull( sysRoot.getAttributes( "ou=test" ) ); service.revert( t0.getRevision() ); try { sysRoot.getAttributes( "ou=test" ); fail( "Should not be able to find the entry!" ); } catch ( NamingException ne ) { assertTrue( ne instanceof LdapNameNotFoundException ); } } public void testRevertAddAndDeleteOperations() throws NamingException { Tag t0 = service.getChangeLog().tag(); // add new test entry AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); // assert presence assertNotNull( sysRoot.getAttributes( "ou=test" ) ); // delete the test entry and test that it is gone sysRoot.destroySubcontext( "ou=test" ); assertNotPresent( sysRoot, "ou=test" ); // now revert back to begining the added entry is still gone service.revert( t0.getRevision() ); assertNotPresent( sysRoot, "ou=test" ); } public void testRevertDeleteOperations() throws NamingException { AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); // tag after the addition before deletion Tag t0 = service.getChangeLog().tag(); assertNotNull( sysRoot.getAttributes( "ou=test" ) ); // delete the test entry and test that it is gone sysRoot.destroySubcontext( "ou=test" ); assertNotPresent( sysRoot, "ou=test" ); // now revert and assert that the added entry re-appears service.revert( t0.getRevision() ); assertNotNull( 
sysRoot.getAttributes( "ou=test" ) ); } public void testRevertRenameOperations() throws NamingException { AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "oldname" ); sysRoot.createSubcontext( "ou=oldname", attrs ); // tag after the addition before rename Tag t0 = service.getChangeLog().tag(); assertNotNull( sysRoot.getAttributes( "ou=oldname" ) ); // rename the test entry and test that the rename occurred sysRoot.rename( "ou=oldname", "ou=newname" ); assertNotPresent( sysRoot, "ou=oldname" ); assertNotNull( sysRoot.getAttributes( "ou=newname" ) ); // now revert and assert that the rename was reversed service.revert( t0.getRevision() ); assertNotPresent( sysRoot, "ou=newname" ); assertNotNull( sysRoot.getAttributes( "ou=oldname" ) ); } public void testRevertModifyOperations() throws NamingException { AttributesImpl attrs = new AttributesImpl( "objectClass", "organizationalUnit", true ); attrs.put( "ou", "test" ); sysRoot.createSubcontext( "ou=test", attrs ); // ------------------------------------------------------------------- // Modify ADD Test // ------------------------------------------------------------------- // tag after the addition before modify ADD Tag t0 = service.getChangeLog().tag(); assertNotNull( sysRoot.getAttributes( "ou=test" ) ); // modify the test entry to add description and test new attr appears sysRoot.modifyAttributes( "ou=test", DirContext.ADD_ATTRIBUTE, new AttributesImpl( "description", "a desc value", true ) ); Attributes resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); Attribute description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( "a desc value", description.get() ); // now revert and assert that the added entry re-appears service.revert( t0.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); assertNull( resusitated.get( "description" ) ); // 
------------------------------------------------------------------- // Modify REPLACE Test // ------------------------------------------------------------------- // add the attribute again and make sure it is old value sysRoot.modifyAttributes( "ou=test", DirContext.ADD_ATTRIBUTE, new AttributesImpl( "description", "old value", true ) ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "old value" ); // now tag then replace the value to "new value" and confirm Tag t1 = service.getChangeLog().tag(); sysRoot.modifyAttributes( "ou=test", DirContext.REPLACE_ATTRIBUTE, new AttributesImpl( "description", "new value", true ) ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "new value" ); // now revert and assert the old value is now reverted service.revert( t1.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "old value" ); // ------------------------------------------------------------------- // Modify REMOVE Test // ------------------------------------------------------------------- Tag t2 = service.getChangeLog().tag(); sysRoot.modifyAttributes( "ou=test", DirContext.REMOVE_ATTRIBUTE, new AttributesImpl( "description", "old value", true ) ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNull( description ); // now revert and assert the old value is now reverted service.revert( t2.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( 
description ); assertEquals( description.get(), "old value" ); // ------------------------------------------------------------------- // Modify Multi Operation Test // ------------------------------------------------------------------- // add a userPassword attribute so we can test replacing it sysRoot.modifyAttributes( "ou=test", DirContext.ADD_ATTRIBUTE, new AttributesImpl( "userPassword", "to be replaced", true ) ); assertPassword( sysRoot.getAttributes( "ou=test" ), "to be replaced" ); ModificationItemImpl[] mods = new ModificationItemImpl[] { new ModificationItemImpl( DirContext.REMOVE_ATTRIBUTE, new AttributeImpl( "description", "old value" ) ), new ModificationItemImpl( DirContext.ADD_ATTRIBUTE, new AttributeImpl( "seeAlso", "ou=added" ) ), new ModificationItemImpl( DirContext.REPLACE_ATTRIBUTE, new AttributeImpl( "userPassword", "a replaced value" ) ) }; Tag t3 = service.getChangeLog().tag(); // now make the modification and check that description is gone, // seeAlso is added, and that the userPassword has been replaced sysRoot.modifyAttributes( "ou=test", mods ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNull( description ); assertPassword( resusitated, "a replaced value" ); Attribute seeAlso = resusitated.get( "seeAlso" ); assertNotNull( seeAlso ); assertEquals( seeAlso.get(), "ou=added" ); // now we revert and make sure the old values are as they were service.revert( t3.getRevision() ); resusitated = sysRoot.getAttributes( "ou=test" ); assertNotNull( resusitated ); description = resusitated.get( "description" ); assertNotNull( description ); assertEquals( description.get(), "old value" ); assertPassword( resusitated, "to be replaced" ); seeAlso = resusitated.get( "seeAlso" ); assertNull( seeAlso ); } private void assertPassword( Attributes entry, String password ) throws NamingException { Attribute userPassword = entry.get( "userPassword" ); assertNotNull( 
userPassword ); Arrays.equals( password.getBytes(), ( byte[] ) userPassword.get() ); } private void assertNotPresent( DirContext ctx, String dn ) throws NamingException { try { ctx.getAttributes( dn ); fail( "Should not be able to find the entry " + dn + " but it is still there." ); } catch ( NamingException ne ) { assertTrue( ne instanceof LdapNameNotFoundException ); } } }
added test to check change log persistence across restarts git-svn-id: dd90f696ee312d86d1f195500465131112b150f5@600731 13f79535-47bb-0310-9956-ffa450edef68
core-unit/src/test/java/org/apache/directory/server/core/changelog/DefaultChangeLogITest.java
added test to check change log persistence across restarts
Java
apache-2.0
2298222dfcf22cc2370aa84f8d254e99db0963b8
0
spinnaker/kork,spinnaker/kork,spinnaker/kork,spinnaker/kork,spinnaker/kork
/* * Copyright 2017 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.kork.expressions; import static java.lang.String.format; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.spinnaker.kork.api.expressions.ExpressionFunctionProvider; import com.netflix.spinnaker.kork.expressions.allowlist.AllowListTypeLocator; import com.netflix.spinnaker.kork.expressions.allowlist.FilteredMethodResolver; import com.netflix.spinnaker.kork.expressions.allowlist.FilteredPropertyAccessor; import com.netflix.spinnaker.kork.expressions.allowlist.MapPropertyAccessor; import com.netflix.spinnaker.kork.expressions.allowlist.ReturnTypeRestrictor; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import org.pf4j.PluginManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.expression.spel.support.StandardEvaluationContext; /** * Provides utility support for SpEL integration Supports registering SpEL functions, ACLs to * classes (via allow list) */ public class 
ExpressionsSupport { private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionsSupport.class); private static final ObjectMapper mapper = new ObjectMapper(); private final Set<Class<?>> allowedReturnTypes; private final List<ExpressionFunctionProvider> expressionFunctionProviders; public ExpressionsSupport(Class<?> extraAllowedReturnType) { this(new Class[] {extraAllowedReturnType}, null, null); } public ExpressionsSupport( Class<?>[] extraAllowedReturnTypes, List<ExpressionFunctionProvider> extraExpressionFunctionProviders, PluginManager pluginManager) { allowedReturnTypes = new HashSet<>( Arrays.asList( Collection.class, Map.class, SortedMap.class, List.class, Set.class, SortedSet.class, ArrayList.class, LinkedList.class, HashSet.class, LinkedHashSet.class, HashMap.class, LinkedHashMap.class, TreeMap.class, TreeSet.class)); Collections.addAll(allowedReturnTypes, extraAllowedReturnTypes); expressionFunctionProviders = new ArrayList<>( Arrays.asList( new JsonExpressionFunctionProvider(), new StringExpressionFunctionProvider())); if (extraExpressionFunctionProviders != null) { expressionFunctionProviders.addAll(extraExpressionFunctionProviders); } // TODO(rz): Once plugins are no longer an incubating feature, extraExpressionFunctionProviders // var could be removed if (pluginManager != null) { expressionFunctionProviders.addAll( pluginManager.getExtensions(ExpressionFunctionProvider.class)); } } public List<ExpressionFunctionProvider> getExpressionFunctionProviders() { return expressionFunctionProviders; } private static void registerFunction( StandardEvaluationContext context, String registrationName, Class<?> cls, String methodName, Class<?>... 
types) { try { context.registerFunction(registrationName, cls.getDeclaredMethod(methodName, types)); } catch (NoSuchMethodException e) { LOGGER.error("Failed to register helper function", e); throw new RuntimeException( "Failed to register helper function '" + registrationName + "' from '" + cls.getName() + "#" + methodName + "'", e); } } /** * Creates a configured SpEL evaluation context * * @param rootObject the root object to transform * @param allowUnknownKeys flag to control what helper functions are available * @return an evaluation context hooked with helper functions and correct ACL via allow list */ public StandardEvaluationContext buildEvaluationContext( Object rootObject, boolean allowUnknownKeys) { StandardEvaluationContext evaluationContext = createEvaluationContext(rootObject, allowUnknownKeys); registerExpressionProviderFunctions(evaluationContext); return evaluationContext; } private StandardEvaluationContext createEvaluationContext( Object rootObject, boolean allowUnknownKeys) { ReturnTypeRestrictor returnTypeRestrictor = new ReturnTypeRestrictor(allowedReturnTypes); StandardEvaluationContext evaluationContext = new StandardEvaluationContext(rootObject); evaluationContext.setTypeLocator(new AllowListTypeLocator()); evaluationContext.setMethodResolvers( Collections.singletonList(new FilteredMethodResolver(returnTypeRestrictor))); evaluationContext.setPropertyAccessors( Arrays.asList( new MapPropertyAccessor(allowUnknownKeys), new FilteredPropertyAccessor(returnTypeRestrictor))); return evaluationContext; } private void registerExpressionProviderFunctions(StandardEvaluationContext evaluationContext) { for (ExpressionFunctionProvider p : expressionFunctionProviders) { for (ExpressionFunctionProvider.FunctionDefinition function : p.getFunctions().getFunctionsDefinitions()) { String namespacedFunctionName = function.getName(); if (p.getNamespace() != null) { namespacedFunctionName = format("%s_%s", p.getNamespace(), namespacedFunctionName); } Class[] 
functionTypes = function.getParameters().stream() .map(ExpressionFunctionProvider.FunctionParameter::getType) .toArray(Class[]::new); registerFunction( evaluationContext, namespacedFunctionName, p.getExtensionClass(), function.getName(), functionTypes); } } } @SuppressWarnings("unused") public static class JsonExpressionFunctionProvider implements ExpressionFunctionProvider { /** * @param o represents an object to convert to json * @return json representation of the said object */ public static String toJson(Object o) { try { String converted = mapper.writeValueAsString(o); if (converted != null && converted.contains("${")) { throw new SpelHelperFunctionException("result for toJson cannot contain an expression"); } return converted; } catch (Exception e) { throw new SpelHelperFunctionException(format("#toJson(%s) failed", o.toString()), e); } } @Override public String getNamespace() { return null; } @Override public Functions getFunctions() { return new Functions( new FunctionDefinition( "toJson", "Converts an object to JSON string", new FunctionParameter( Object.class, "value", "An Object to marshall to a JSON String"))); } } @SuppressWarnings("unused") public static class StringExpressionFunctionProvider implements ExpressionFunctionProvider { /** * Parses a string to an integer * * @param str represents an int * @return an integer */ public static Integer toInt(String str) { return Integer.valueOf(str); } /** * Parses a string to a float * * @param str represents an float * @return an float */ public static Float toFloat(String str) { return Float.valueOf(str); } /** * Parses a string to a boolean * * @param str represents an boolean * @return a boolean */ public static Boolean toBoolean(String str) { return Boolean.valueOf(str); } /** * Encodes a string to base64 * * @param text plain string * @return converted string */ public static String toBase64(String text) { return Base64.getEncoder().encodeToString(text.getBytes()); } /** * Attempts to decode a base64 
string * * @param text plain string * @return decoded string */ public static String fromBase64(String text) { return new String(Base64.getDecoder().decode(text), StandardCharsets.UTF_8); } /** * Converts a String to alpha numeric * * @param str string to convert * @return converted string */ public static String alphanumerical(String str) { return str.replaceAll("[^A-Za-z0-9]", ""); } @Override public String getNamespace() { return null; } @Override public Functions getFunctions() { return new Functions( new FunctionDefinition( "toInt", "Converts a string to integer", new FunctionParameter(String.class, "value", "A String value to convert to an int")), new FunctionDefinition( "toFloat", "Converts a string to float", new FunctionParameter(String.class, "value", "A String value to convert to a float")), new FunctionDefinition( "toBoolean", "Converts a string value to boolean", new FunctionParameter( String.class, "value", "A String value to convert to a boolean")), new FunctionDefinition( "toBase64", "Encodes a string to base64 string", new FunctionParameter(String.class, "value", "A String value to base64 encode")), new FunctionDefinition( "fromBase64", "Decodes a base64 string", new FunctionParameter( String.class, "value", "A base64-encoded String value to decode")), new FunctionDefinition( "alphanumerical", "Removes all non-alphanumeric characters from a string", new FunctionParameter( String.class, "value", "A String value to strip of all non-alphanumeric characters"))); } } }
kork-expressions/src/main/java/com/netflix/spinnaker/kork/expressions/ExpressionsSupport.java
/* * Copyright 2017 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.kork.expressions; import static java.lang.String.format; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.spinnaker.kork.api.expressions.ExpressionFunctionProvider; import com.netflix.spinnaker.kork.expressions.allowlist.AllowListTypeLocator; import com.netflix.spinnaker.kork.expressions.allowlist.FilteredMethodResolver; import com.netflix.spinnaker.kork.expressions.allowlist.FilteredPropertyAccessor; import com.netflix.spinnaker.kork.expressions.allowlist.MapPropertyAccessor; import com.netflix.spinnaker.kork.expressions.allowlist.ReturnTypeRestrictor; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import org.pf4j.PluginManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.expression.spel.support.StandardEvaluationContext; /** * Provides utility support for SpEL integration Supports registering SpEL functions, ACLs to * classes (via allow list) */ public class 
ExpressionsSupport { private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionsSupport.class); private static final ObjectMapper mapper = new ObjectMapper(); private final Set<Class<?>> allowedReturnTypes; private final List<ExpressionFunctionProvider> expressionFunctionProviders; public ExpressionsSupport(Class<?> extraAllowedReturnType) { this(new Class[] {extraAllowedReturnType}, null, null); } public ExpressionsSupport( Class<?>[] extraAllowedReturnTypes, List<ExpressionFunctionProvider> extraExpressionFunctionProviders, PluginManager pluginManager) { allowedReturnTypes = new HashSet<>( Arrays.asList( Collection.class, Map.class, SortedMap.class, List.class, Set.class, SortedSet.class, ArrayList.class, LinkedList.class, HashSet.class, LinkedHashSet.class, HashMap.class, LinkedHashMap.class, TreeMap.class, TreeSet.class)); Collections.addAll(allowedReturnTypes, extraAllowedReturnTypes); expressionFunctionProviders = new ArrayList<>( Arrays.asList( new JsonExpressionFunctionProvider(), new StringExpressionFunctionProvider())); if (extraExpressionFunctionProviders != null) { expressionFunctionProviders.addAll(extraExpressionFunctionProviders); } // TODO(rz): Once plugins are no longer an incubating feature, extraExpressionFunctionProviders // var could be removed if (pluginManager != null) { expressionFunctionProviders.addAll( pluginManager.getExtensions(ExpressionFunctionProvider.class)); } } public List<ExpressionFunctionProvider> getExpressionFunctionProviders() { return expressionFunctionProviders; } private static void registerFunction( StandardEvaluationContext context, String registrationName, Class<?> cls, String methodName, Class<?>... 
types) { try { context.registerFunction(registrationName, cls.getDeclaredMethod(methodName, types)); } catch (NoSuchMethodException e) { LOGGER.error("Failed to register helper function", e); throw new RuntimeException( "Failed to register helper function '" + registrationName + "' from '" + cls.getName() + "#" + methodName + "'", e); } } /** * Creates a configured SpEL evaluation context * * @param rootObject the root object to transform * @param allowUnknownKeys flag to control what helper functions are available * @return an evaluation context hooked with helper functions and correct ACL via allow list */ public StandardEvaluationContext buildEvaluationContext( Object rootObject, boolean allowUnknownKeys) { StandardEvaluationContext evaluationContext = createEvaluationContext(rootObject, allowUnknownKeys); registerExpressionProviderFunctions(evaluationContext); return evaluationContext; } private StandardEvaluationContext createEvaluationContext( Object rootObject, boolean allowUnknownKeys) { ReturnTypeRestrictor returnTypeRestrictor = new ReturnTypeRestrictor(allowedReturnTypes); StandardEvaluationContext evaluationContext = new StandardEvaluationContext(rootObject); evaluationContext.setTypeLocator(new AllowListTypeLocator()); evaluationContext.setMethodResolvers( Collections.singletonList(new FilteredMethodResolver(returnTypeRestrictor))); evaluationContext.setPropertyAccessors( Arrays.asList( new MapPropertyAccessor(allowUnknownKeys), new FilteredPropertyAccessor(returnTypeRestrictor))); return evaluationContext; } private void registerExpressionProviderFunctions(StandardEvaluationContext evaluationContext) { for (ExpressionFunctionProvider p : expressionFunctionProviders) { for (ExpressionFunctionProvider.FunctionDefinition function : p.getFunctions().getFunctionsDefinitions()) { String namespacedFunctionName = function.getName(); if (p.getNamespace() != null) { namespacedFunctionName = format("%s_%s", p.getNamespace(), namespacedFunctionName); } Class[] 
functionTypes = function.getParameters().stream() .map(ExpressionFunctionProvider.FunctionParameter::getType) .toArray(Class[]::new); registerFunction( evaluationContext, namespacedFunctionName, p.getClass(), function.getName(), functionTypes); } } } @SuppressWarnings("unused") public static class JsonExpressionFunctionProvider implements ExpressionFunctionProvider { /** * @param o represents an object to convert to json * @return json representation of the said object */ public static String toJson(Object o) { try { String converted = mapper.writeValueAsString(o); if (converted != null && converted.contains("${")) { throw new SpelHelperFunctionException("result for toJson cannot contain an expression"); } return converted; } catch (Exception e) { throw new SpelHelperFunctionException(format("#toJson(%s) failed", o.toString()), e); } } @Override public String getNamespace() { return null; } @Override public Functions getFunctions() { return new Functions( new FunctionDefinition( "toJson", "Converts an object to JSON string", new FunctionParameter( Object.class, "value", "An Object to marshall to a JSON String"))); } } @SuppressWarnings("unused") public static class StringExpressionFunctionProvider implements ExpressionFunctionProvider { /** * Parses a string to an integer * * @param str represents an int * @return an integer */ public static Integer toInt(String str) { return Integer.valueOf(str); } /** * Parses a string to a float * * @param str represents an float * @return an float */ public static Float toFloat(String str) { return Float.valueOf(str); } /** * Parses a string to a boolean * * @param str represents an boolean * @return a boolean */ public static Boolean toBoolean(String str) { return Boolean.valueOf(str); } /** * Encodes a string to base64 * * @param text plain string * @return converted string */ public static String toBase64(String text) { return Base64.getEncoder().encodeToString(text.getBytes()); } /** * Attempts to decode a base64 string * * 
@param text plain string * @return decoded string */ public static String fromBase64(String text) { return new String(Base64.getDecoder().decode(text), StandardCharsets.UTF_8); } /** * Converts a String to alpha numeric * * @param str string to convert * @return converted string */ public static String alphanumerical(String str) { return str.replaceAll("[^A-Za-z0-9]", ""); } @Override public String getNamespace() { return null; } @Override public Functions getFunctions() { return new Functions( new FunctionDefinition( "toInt", "Converts a string to integer", new FunctionParameter(String.class, "value", "A String value to convert to an int")), new FunctionDefinition( "toFloat", "Converts a string to float", new FunctionParameter(String.class, "value", "A String value to convert to a float")), new FunctionDefinition( "toBoolean", "Converts a string value to boolean", new FunctionParameter( String.class, "value", "A String value to convert to a boolean")), new FunctionDefinition( "toBase64", "Encodes a string to base64 string", new FunctionParameter(String.class, "value", "A String value to base64 encode")), new FunctionDefinition( "fromBase64", "Decodes a base64 string", new FunctionParameter( String.class, "value", "A base64-encoded String value to decode")), new FunctionDefinition( "alphanumerical", "Removes all non-alphanumeric characters from a string", new FunctionParameter( String.class, "value", "A String value to strip of all non-alphanumeric characters"))); } } }
fix(plugins): Use getExtensionClass() instead of getClass() for ExpressionFunctionProviders (#864) ExpressionFunctionProvider extends SpinnakerExtensionPoint, enabling SpEL expressions to be contributed by plugins. However, when the functions are being registered, the class of the ExpressionFunctionProvider is used - for ExpressionFunctionProviders coming from a plugin, this is a proxy class. In order for this to work correctly, the getExtensionClass() method should be used instead to resolve the appropriate class (proxy class or not). Co-authored-by: David Byron <10298d4ee57fdb39c45d5c5e4c1ffc7586d20965@users.noreply.github.com>
kork-expressions/src/main/java/com/netflix/spinnaker/kork/expressions/ExpressionsSupport.java
fix(plugins): Use getExtensionClass() instead of getClass() for ExpressionFunctionProviders (#864)
Java
apache-2.0
c61d62ef4b42aa68fe5c95befaf0bd7664844c6d
0
spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework
/* * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.jdbc.datasource.init; import javax.sql.DataSource; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.util.Assert; /** * Used to {@linkplain #setDatabasePopulator set up} a database during * initialization and {@link #setDatabaseCleaner clean up} a database during * destruction. * * @author Dave Syer * @author Sam Brannen * @since 3.0 * @see DatabasePopulator */ public class DataSourceInitializer implements InitializingBean, DisposableBean { private DataSource dataSource; private DatabasePopulator databasePopulator; private DatabasePopulator databaseCleaner; private boolean enabled = true; /** * The {@link DataSource} for the database to populate when this component * is initialized and to clean up when this component is shut down. * <p>This property is mandatory with no default provided. * @param dataSource the DataSource */ public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } /** * Set the {@link DatabasePopulator} to execute during the bean initialization * phase. 
* @param databasePopulator the {@code DatabasePopulator} to use during * initialization * @see #setDatabaseCleaner */ public void setDatabasePopulator(DatabasePopulator databasePopulator) { this.databasePopulator = databasePopulator; } /** * Set the {@link DatabasePopulator} to execute during the bean destruction * phase, cleaning up the database and leaving it in a known state for others. * @param databaseCleaner the {@code DatabasePopulator} to use during destruction * @see #setDatabasePopulator */ public void setDatabaseCleaner(DatabasePopulator databaseCleaner) { this.databaseCleaner = databaseCleaner; } /** * Flag to explicitly enable or disable the {@linkplain #setDatabasePopulator * database populator} and {@linkplain #setDatabaseCleaner database cleaner}. * @param enabled {@code true} if the database populator and database cleaner * should be called on startup and shutdown, respectively */ public void setEnabled(boolean enabled) { this.enabled = enabled; } /** * Use the {@linkplain #setDatabasePopulator database populator} to set up * the database. */ @Override public void afterPropertiesSet() { execute(this.databasePopulator); } /** * Use the {@linkplain #setDatabaseCleaner database cleaner} to clean up the * database. */ @Override public void destroy() { execute(this.databaseCleaner); } private void execute(DatabasePopulator populator) { Assert.state(dataSource != null, "DataSource must be set"); if (this.enabled && populator != null) { DatabasePopulatorUtils.execute(populator, this.dataSource); } } }
spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/DataSourceInitializer.java
/* * Copyright 2002-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.jdbc.datasource.init; import javax.sql.DataSource; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; /** * Used to populate a database during initialization. * * @author Dave Syer * @since 3.0 * @see DatabasePopulator */ public class DataSourceInitializer implements InitializingBean, DisposableBean { private DataSource dataSource; private DatabasePopulator databasePopulator; private DatabasePopulator databaseCleaner; private boolean enabled = true; /** * The {@link DataSource} to populate when this component is initialized. * Mandatory with no default. * @param dataSource the DataSource */ public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } /** * The {@link DatabasePopulator} to use to populate the data source. * Mandatory with no default. * @param databasePopulator the database populator to use. */ public void setDatabasePopulator(DatabasePopulator databasePopulator) { this.databasePopulator = databasePopulator; } /** * Set a script execution to be run in the bean destruction callback, * cleaning up the database and leaving it in a known state for others. 
* @param databaseCleaner the database script executor to run on destroy */ public void setDatabaseCleaner(DatabasePopulator databaseCleaner) { this.databaseCleaner = databaseCleaner; } /** * Flag to explicitly enable or disable the database populator. * @param enabled true if the database populator will be called on startup */ public void setEnabled(boolean enabled) { this.enabled = enabled; } /** * Use the populator to set up data in the data source. */ @Override public void afterPropertiesSet() { if (this.databasePopulator != null && this.enabled) { DatabasePopulatorUtils.execute(this.databasePopulator, this.dataSource); } } /** * Use the populator to clean up data in the data source. */ @Override public void destroy() { if (this.databaseCleaner != null && this.enabled) { DatabasePopulatorUtils.execute(this.databaseCleaner, this.dataSource); } } }
Improve DataSourceInitializer Javadoc and implementation
spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/DataSourceInitializer.java
Improve DataSourceInitializer Javadoc and implementation
Java
apache-2.0
52ad49b3ae538ebc0ac10993abd317fb71217a48
0
spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework
/* * Copyright 2002-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.bind.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Annotation which indicates that a method parameter should be bound to an HTTP cookie. * Supported for annotated handler methods in Servlet and Portlet environments. * * <p>The method parameter may be declared as type {@link javax.servlet.http.Cookie} * or as cookie value type (String, int, etc). * * @author Juergen Hoeller * @since 3.0 * @see RequestMapping * @see RequestParam * @see RequestHeader * @see org.springframework.web.bind.annotation.RequestMapping * @see org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter * @see org.springframework.web.portlet.mvc.annotation.AnnotationMethodHandlerAdapter */ @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface CookieValue { /** * The name of the cookie to bind to. */ String value() default ""; /** * Whether the header is required. * <p>Default is <code>true</code>, leading to an exception being thrown * in case the header is missing in the request. Switch this to * <code>false</code> if you prefer a <code>null</value> in case of the * missing header. 
* <p>Alternatively, provide a {@link #defaultValue() defaultValue}, * which implicitly sets this flag to <code>false</code>. */ boolean required() default true; /** * The default value to use as a fallback. Supplying a default value implicitly * sets {@link #required()} to false. */ String defaultValue() default ""; }
org.springframework.web/src/main/java/org/springframework/web/bind/annotation/CookieValue.java
/* * Copyright 2002-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.bind.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Annotation which indicates that a method parameter should be bound to an HTTP cookie. * Supported for annotated handler methods in Servlet and Portlet environments. * * <p>The method parameter may be declared as type {@link javax.servlet.http.Cookie} * or as cookie value type (String, int, etc). * * @author Juergen Hoeller * @since 3.0 * @see RequestMapping * @see RequestParam * @see RequestHeader * @see org.springframework.web.bind.annotation.RequestMapping * @see org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter * @see org.springframework.web.portlet.mvc.annotation.AnnotationMethodHandlerAdapter */ @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface CookieValue { /** * The name of the cookie to bind to. */ String value() default ""; /** * Whether the header is required. * <p>Default is <code>true</code>, leading to an exception thrown in case * of the header missing in the request. Switch this to <code>false</code> * if you prefer a <code>null</value> in case of the header missing. 
* <p>Alternatively, provide a {@link #defaultValue() defaultValue}, * which implicitly sets this flag to <code>false</code>. */ boolean required() default true; /** * The default value to use as a fallback. Supplying a default value implicitely * sets {@link #required()} to false. */ String defaultValue() default ""; }
fixed typo and JavaDoc polishing.
org.springframework.web/src/main/java/org/springframework/web/bind/annotation/CookieValue.java
fixed typo and JavaDoc polishing.
Java
apache-2.0
6180d028d12a5d65018d86e8264ad841ce7a3e2d
0
apache/directory-project
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.shared.ldap.codec.bind; import java.nio.ByteBuffer; import java.util.List; import javax.naming.NamingException; import org.apache.directory.shared.asn1.ber.Asn1Decoder; import org.apache.directory.shared.asn1.ber.IAsn1Container; import org.apache.directory.shared.asn1.codec.DecoderException; import org.apache.directory.shared.asn1.codec.EncoderException; import org.apache.directory.shared.ldap.codec.Control; import org.apache.directory.shared.ldap.codec.LdapDecoder; import org.apache.directory.shared.ldap.codec.LdapMessage; import org.apache.directory.shared.ldap.codec.LdapMessageContainer; import org.apache.directory.shared.ldap.codec.bind.BindRequest; import org.apache.directory.shared.ldap.codec.bind.SimpleAuthentication; import org.apache.directory.shared.ldap.name.LdapDN; import org.apache.directory.shared.ldap.util.StringTools; import junit.framework.TestCase; /** * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class BindRequestPerfTest extends TestCase { /** * Test the decoding of a BindRequest with Simple authentication and no * controls */ public void testDecodeBindRequestSimpleNoControlsPerf() { Asn1Decoder ldapDecoder 
= new LdapDecoder(); ByteBuffer stream = ByteBuffer.allocate( 0x52 ); stream.put( new byte[] { 0x30, 0x50, // LDAPMessage ::=SEQUENCE { 0x02, 0x01, 0x01, // messageID MessageID 0x60, 0x2E, // CHOICE { ..., bindRequest BindRequest, ... // BindRequest ::= APPLICATION[0] SEQUENCE { 0x02, 0x01, 0x03, // version INTEGER (1..127), 0x04, 0x1F, // name LDAPDN, 'u', 'i', 'd', '=', 'a', 'k', 'a', 'r', 'a', 's', 'u', 'l', 'u', ',', 'd', 'c', '=', 'e', 'x', 'a', 'm', 'p', 'l', 'e', ',', 'd', 'c', '=', 'c', 'o', 'm', ( byte ) 0x80, 0x08, // authentication AuthenticationChoice // AuthenticationChoice ::= CHOICE { simple [0] OCTET STRING, // ... 'p', 'a', 's', 's', 'w', 'o', 'r', 'd', ( byte ) 0xA0, 0x1B, // A control 0x30, 0x19, 0x04, 0x17, 0x32, 0x2E, 0x31, 0x36, 0x2E, 0x38, 0x34, 0x30, 0x2E, 0x31, 0x2E, 0x31, 0x31, 0x33, 0x37, 0x33, 0x30, 0x2E, 0x33, 0x2E, 0x34, 0x2E, 0x32 } ); String decodedPdu = StringTools.dumpBytes( stream.array() ); stream.flip(); // Allocate a LdapMessage Container IAsn1Container ldapMessageContainer = new LdapMessageContainer(); // Decode the BindRequest PDU try { int nbLoops = 10; long t0 = System.currentTimeMillis(); for ( int i = 0; i < nbLoops; i++ ) { ldapDecoder.decode( stream, ldapMessageContainer ); ( ( LdapMessageContainer ) ldapMessageContainer).clean(); stream.flip(); } long t1 = System.currentTimeMillis(); System.out.println( "testDecodeBindRequestSimpleNoControlsPerf, " + nbLoops + " loops, Delta = " + ( t1 - t0 ) ); ldapDecoder.decode( stream, ldapMessageContainer ); } catch ( DecoderException de ) { de.printStackTrace(); fail( de.getMessage() ); } catch ( NamingException ne ) { ne.printStackTrace(); fail( ne.getMessage() ); } // Check the decoded BindRequest LdapMessage message = ( ( LdapMessageContainer ) ldapMessageContainer ).getLdapMessage(); BindRequest br = message.getBindRequest(); assertEquals( 1, message.getMessageId() ); assertEquals( 3, br.getVersion() ); assertEquals( "uid=akarasulu,dc=example,dc=com", br.getName().toString() 
); assertEquals( true, ( br.getAuthentication() instanceof SimpleAuthentication ) ); assertEquals( "password", StringTools.utf8ToString( ( ( SimpleAuthentication ) br.getAuthentication() ) .getSimple() ) ); // Check the Control List controls = message.getControls(); assertEquals( 1, controls.size() ); Control control = message.getControls( 0 ); assertEquals( "2.16.840.1.113730.3.4.2", control.getControlType() ); assertEquals( "", StringTools.dumpBytes( ( byte[] ) control.getControlValue() ) ); // Check the length assertEquals( 0x52, message.computeLength() ); // Check the encoding try { ByteBuffer bb = message.encode( null ); String encodedPdu = StringTools.dumpBytes( bb.array() ); assertEquals( encodedPdu, decodedPdu ); } catch ( EncoderException ee ) { ee.printStackTrace(); fail( ee.getMessage() ); } } /** * Test the decoding of a BindRequest with Simple authentication and no * controls */ public void testEncodeBindRequestPerf() throws Exception { LdapDN name = new LdapDN( "uid=akarasulu,dc=example,dc=com" ); int nbLoops = 10; long t0 = System.currentTimeMillis(); ByteBuffer bb=null; for ( int i = 0; i< nbLoops; i++) { // Check the decoded BindRequest LdapMessage message = new LdapMessage(); message.setMessageId( 1 ); BindRequest br = new BindRequest(); br.setMessageId( 1 ); br.setName( name ); Control control = new Control(); control.setControlType( "2.16.840.1.113730.3.4.2" ); LdapAuthentication authentication = new SimpleAuthentication(); ((SimpleAuthentication)authentication).setSimple( StringTools.getBytesUtf8( "password" ) ); br.addControl( control ); br.setAuthentication( authentication ); message.setProtocolOP( br ); // Check the encoding try { bb = message.encode( null ); } catch ( EncoderException ee ) { ee.printStackTrace(); fail( ee.getMessage() ); } } long t1 = System.currentTimeMillis(); System.out.println( "BindRequest testEncodeBindRequestPerf, " + nbLoops + " loops, Delta = " + (t1 - t0)); System.out.println( StringTools.dumpBytes( bb.array() )); 
} }
shared/ldap/src/test/java/org/apache/directory/shared/ldap/codec/bind/BindRequestPerfTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.shared.ldap.codec.bind; import java.nio.ByteBuffer; import java.util.List; import javax.naming.NamingException; import org.apache.directory.shared.asn1.ber.Asn1Decoder; import org.apache.directory.shared.asn1.ber.IAsn1Container; import org.apache.directory.shared.asn1.codec.DecoderException; import org.apache.directory.shared.asn1.codec.EncoderException; import org.apache.directory.shared.ldap.codec.Control; import org.apache.directory.shared.ldap.codec.LdapDecoder; import org.apache.directory.shared.ldap.codec.LdapMessage; import org.apache.directory.shared.ldap.codec.LdapMessageContainer; import org.apache.directory.shared.ldap.codec.bind.BindRequest; import org.apache.directory.shared.ldap.codec.bind.SimpleAuthentication; import org.apache.directory.shared.ldap.name.LdapDN; import org.apache.directory.shared.ldap.util.StringTools; import junit.framework.TestCase; /** * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class BindRequestPerfTest extends TestCase { /** * Test the decoding of a BindRequest with Simple authentication and no * controls */ public void testDecodeBindRequestSimpleNoControlsPerf() { Asn1Decoder ldapDecoder 
= new LdapDecoder(); ByteBuffer stream = ByteBuffer.allocate( 0x52 ); stream.put( new byte[] { 0x30, 0x50, // LDAPMessage ::=SEQUENCE { 0x02, 0x01, 0x01, // messageID MessageID 0x60, 0x2E, // CHOICE { ..., bindRequest BindRequest, ... // BindRequest ::= APPLICATION[0] SEQUENCE { 0x02, 0x01, 0x03, // version INTEGER (1..127), 0x04, 0x1F, // name LDAPDN, 'u', 'i', 'd', '=', 'a', 'k', 'a', 'r', 'a', 's', 'u', 'l', 'u', ',', 'd', 'c', '=', 'e', 'x', 'a', 'm', 'p', 'l', 'e', ',', 'd', 'c', '=', 'c', 'o', 'm', ( byte ) 0x80, 0x08, // authentication AuthenticationChoice // AuthenticationChoice ::= CHOICE { simple [0] OCTET STRING, // ... 'p', 'a', 's', 's', 'w', 'o', 'r', 'd', ( byte ) 0xA0, 0x1B, // A control 0x30, 0x19, 0x04, 0x17, 0x32, 0x2E, 0x31, 0x36, 0x2E, 0x38, 0x34, 0x30, 0x2E, 0x31, 0x2E, 0x31, 0x31, 0x33, 0x37, 0x33, 0x30, 0x2E, 0x33, 0x2E, 0x34, 0x2E, 0x32 } ); String decodedPdu = StringTools.dumpBytes( stream.array() ); stream.flip(); // Allocate a LdapMessage Container IAsn1Container ldapMessageContainer = new LdapMessageContainer(); // Decode the BindRequest PDU try { int nbLoops = 1000000; long t0 = System.currentTimeMillis(); for ( int i = 0; i < nbLoops; i++ ) { ldapDecoder.decode( stream, ldapMessageContainer ); ( ( LdapMessageContainer ) ldapMessageContainer).clean(); stream.flip(); } long t1 = System.currentTimeMillis(); System.out.println( "testDecodeBindRequestSimpleNoControlsPerf, " + nbLoops + " loops, Delta = " + ( t1 - t0 ) ); ldapDecoder.decode( stream, ldapMessageContainer ); } catch ( DecoderException de ) { de.printStackTrace(); fail( de.getMessage() ); } catch ( NamingException ne ) { ne.printStackTrace(); fail( ne.getMessage() ); } // Check the decoded BindRequest LdapMessage message = ( ( LdapMessageContainer ) ldapMessageContainer ).getLdapMessage(); BindRequest br = message.getBindRequest(); assertEquals( 1, message.getMessageId() ); assertEquals( 3, br.getVersion() ); assertEquals( "uid=akarasulu,dc=example,dc=com", 
br.getName().toString() ); assertEquals( true, ( br.getAuthentication() instanceof SimpleAuthentication ) ); assertEquals( "password", StringTools.utf8ToString( ( ( SimpleAuthentication ) br.getAuthentication() ) .getSimple() ) ); // Check the Control List controls = message.getControls(); assertEquals( 1, controls.size() ); Control control = message.getControls( 0 ); assertEquals( "2.16.840.1.113730.3.4.2", control.getControlType() ); assertEquals( "", StringTools.dumpBytes( ( byte[] ) control.getControlValue() ) ); // Check the length assertEquals( 0x52, message.computeLength() ); // Check the encoding try { ByteBuffer bb = message.encode( null ); String encodedPdu = StringTools.dumpBytes( bb.array() ); assertEquals( encodedPdu, decodedPdu ); } catch ( EncoderException ee ) { ee.printStackTrace(); fail( ee.getMessage() ); } } /** * Test the decoding of a BindRequest with Simple authentication and no * controls */ public void testEncodeBindRequestPerf() throws Exception { LdapDN name = new LdapDN( "uid=akarasulu,dc=example,dc=com" ); int nbLoops = 10000000; long t0 = System.currentTimeMillis(); ByteBuffer bb=null; for ( int i = 0; i< nbLoops; i++) { // Check the decoded BindRequest LdapMessage message = new LdapMessage(); message.setMessageId( 1 ); BindRequest br = new BindRequest(); br.setMessageId( 1 ); br.setName( name ); Control control = new Control(); control.setControlType( "2.16.840.1.113730.3.4.2" ); LdapAuthentication authentication = new SimpleAuthentication(); ((SimpleAuthentication)authentication).setSimple( StringTools.getBytesUtf8( "password" ) ); br.addControl( control ); br.setAuthentication( authentication ); message.setProtocolOP( br ); // Check the encoding try { bb = message.encode( null ); } catch ( EncoderException ee ) { ee.printStackTrace(); fail( ee.getMessage() ); } } long t1 = System.currentTimeMillis(); System.out.println( "BindRequest testEncodeBindRequestPerf, " + nbLoops + " loops, Delta = " + (t1 - t0)); System.out.println( 
StringTools.dumpBytes( bb.array() )); } }
Changed the number of loop in order to avoid a very long running test git-svn-id: 5c3b06693d750a6aefbf1081b6b7d57c0165fdb2@463338 13f79535-47bb-0310-9956-ffa450edef68
shared/ldap/src/test/java/org/apache/directory/shared/ldap/codec/bind/BindRequestPerfTest.java
Changed the number of loop in order to avoid a very long running test
Java
apache-2.0
7bda17c7a49b0eb1670135bad7cb28799724ea7d
0
opencb/cellbase,Swaathik/cellbase,Swaathik/cellbase,Swaathik/cellbase,dapregi/cellbase,dapregi/cellbase,opencb/cellbase,Swaathik/cellbase,dapregi/cellbase,opencb/cellbase,opencb/cellbase,Swaathik/cellbase,dapregi/cellbase,dapregi/cellbase,opencb/cellbase,dapregi/cellbase,dapregi/cellbase,Swaathik/cellbase,opencb/cellbase,opencb/cellbase,Swaathik/cellbase
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.cellbase.app.cli; import com.beust.jcommander.ParameterException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.commons.collections.map.HashedMap; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.StringUtils; import org.opencb.cellbase.core.CellBaseConfiguration.SpeciesProperties.Species; import org.opencb.commons.utils.FileUtils; import java.io.*; import java.net.URI; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.*; /** * Created by imedina on 03/02/15. 
*/ public class DownloadCommandExecutor extends CommandExecutor { private CliOptionsParser.DownloadCommandOptions downloadCommandOptions; private Path output = null; private Path common = null; private File ensemblScriptsFolder; private String ensemblVersion; private String ensemblRelease; private Species species; private static final String[] VARIATION_FILES = {"variation.txt.gz", "variation_feature.txt.gz", "transcript_variation.txt.gz", "variation_synonym.txt.gz", "seq_region.txt.gz", "source.txt.gz", "attrib.txt.gz", "attrib_type.txt.gz", "seq_region.txt.gz", "structural_variation_feature.txt.gz", "study.txt.gz", "phenotype.txt.gz", "phenotype_feature.txt.gz", "phenotype_feature_attrib.txt.gz", "motif_feature_variation.txt.gz", "genotype_code.txt.gz", "allele_code.txt.gz", "population_genotype.txt.gz", "population.txt.gz", "allele.txt.gz", }; private static final String[] REGULATION_FILES = {"AnnotatedFeatures.gff.gz", "MotifFeatures.gff.gz", "RegulatoryFeatures_MultiCell.gff.gz", }; private static final Map<String, String> GENE_UNIPROT_XREF_FILES = new HashMap() { { put("Homo sapiens", "HUMAN_9606_idmapping_selected.tab.gz"); put("Mus musculus", "MOUSE_10090_idmapping_selected.tab.gz"); put("Rattus norvegicus", "RAT_10116_idmapping_selected.tab.gz"); put("Danio rerio", "DANRE_7955_idmapping_selected.tab.gz"); put("Drosophila melanogaster", "DROME_7227_idmapping_selected.tab.gz"); put("Saccharomyces cerevisiae", "YEAST_559292_idmapping_selected.tab.gz"); } }; private static final String ENSEMBL_NAME = "ENSEMBL"; private static final String GENE_EXPRESSION_ATLAS_NAME = "Gene Expression Atlas"; private static final String HPO_NAME = "HPO"; private static final String DISGENET_NAME = "DisGeNET"; private static final String DGIDB_NAME = "DGIdb"; private static final String UNIPROT_NAME = "DGIdb"; private static final String CADD_NAME = "CADD"; private static final String GENOME_DATA = "genome"; private static final String GENE_DATA = "gene"; private static final 
String GENE_DISEASE_ASSOCIATION_DATA = "gene_disease_association";
    private static final String VARIATION_DATA = "variation";
    private static final String VARIATION_FUNCTIONAL_SCORE_DATA = "variation_functional_score";

    public DownloadCommandExecutor(CliOptionsParser.DownloadCommandOptions downloadCommandOptions) {
        super(downloadCommandOptions.commonOptions.logLevel, downloadCommandOptions.commonOptions.verbose,
                downloadCommandOptions.commonOptions.conf);

        this.downloadCommandOptions = downloadCommandOptions;

        if (downloadCommandOptions.output != null) {
            output = Paths.get(downloadCommandOptions.output);
        }
        if (downloadCommandOptions.common != null) {
            common = Paths.get(downloadCommandOptions.common);
        } else {
            // Shared (species-independent) data defaults to a subfolder of the output dir
            common = output.resolve("common");
        }
        this.ensemblScriptsFolder = new File(System.getProperty("basedir") + "/bin/ensembl-scripts/");
    }

    /**
     * Execute specific 'download' command options.
     */
    public void execute() {
        try {
            if (downloadCommandOptions.species != null && !downloadCommandOptions.species.isEmpty()) {
                // We need to get the Species object from the CLI name.
                // This can be the scientific or common name, or the ID.
                for (Species sp : configuration.getAllSpecies()) {
                    if (downloadCommandOptions.species.equalsIgnoreCase(sp.getScientificName())
                            || downloadCommandOptions.species.equalsIgnoreCase(sp.getCommonName())
                            || downloadCommandOptions.species.equalsIgnoreCase(sp.getId())) {
                        species = sp;
                        break;
                    }
                }
                // If everything is right we launch the download
                if (species != null) {
                    processSpecies(species);
                } else {
                    logger.error("Species '{}' not valid", downloadCommandOptions.species);
                }
            } else {
                logger.error("--species parameter '{}' not valid", downloadCommandOptions.species);
            }
        } catch (ParameterException e) {
            logger.error("Error in 'download' command line: " + e.getMessage());
        } catch (IOException | InterruptedException e) {
            logger.error("Error downloading '" + downloadCommandOptions.species + "' files: " + e.getMessage());
        }
    }

    /**
     * Resolves the Ensembl host and assembly for the species and downloads every
     * data category requested with --data (or all of them for 'all').
     */
    private void processSpecies(Species sp) throws IOException, InterruptedException {
        logger.info("Processing species " + sp.getScientificName());

        // We need to find which is the correct Ensembl host URL.
        // This can be different depending on if is a vertebrate species.
        String ensemblHostUrl;
        if (configuration.getSpecies().getVertebrates().contains(sp)) {
            ensemblHostUrl = configuration.getDownload().getEnsembl().getUrl().getHost();
        } else {
            ensemblHostUrl = configuration.getDownload().getEnsemblGenomes().getUrl().getHost();
        }

        // Getting the assembly.
        // By default the first assembly in the configuration.json
        Species.Assembly assembly = null;
        if (downloadCommandOptions.assembly == null || downloadCommandOptions.assembly.isEmpty()) {
            assembly = sp.getAssemblies().get(0);
        } else {
            for (Species.Assembly assembly1 : sp.getAssemblies()) {
                if (downloadCommandOptions.assembly.equalsIgnoreCase(assembly1.getName())) {
                    assembly = assembly1;
                    break;
                }
            }
        }

        // Checking that the species and assembly are correct
        if (ensemblHostUrl == null || assembly == null) {
            logger.error("Something is not correct, check the species '{}' or the assembly '{}'",
                    downloadCommandOptions.species, downloadCommandOptions.assembly);
            return;
        }

        // Output folder creation: "<scientific_name>_<assembly>", lower case, spaces to underscores
        String spShortName = sp.getScientificName().toLowerCase()
                .replaceAll("\\.", "")
                .replaceAll("\\)", "")
                .replaceAll("\\(", "")
                .replaceAll("[-/]", " ")
                .replaceAll("\\s+", "_");
        String spAssembly = assembly.getName().toLowerCase();
        Path spFolder = output.resolve(spShortName + "_" + spAssembly);
        makeDir(spFolder);
        makeDir(common);

        ensemblVersion = assembly.getEnsemblVersion();
        ensemblRelease = "release-" + ensemblVersion.split("_")[0];

        if (downloadCommandOptions.data != null && !downloadCommandOptions.data.isEmpty()) {
            List<String> dataList;
            if (downloadCommandOptions.data.equals("all")) {
                dataList = sp.getData();
            } else {
                dataList = Arrays.asList(downloadCommandOptions.data.split(","));
            }
            for (String data : dataList) {
                switch (data) {
                    case GENOME_DATA:
                        downloadReferenceGenome(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
                        break;
                    case GENE_DATA:
                        downloadEnsemblGene(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
                        break;
                    case GENE_DISEASE_ASSOCIATION_DATA:
                        if (speciesHasInfoToDownload(sp, "gene_disease_association")) {
                            downloadGeneDiseaseAssociation(sp, spFolder);
                        }
                        break;
                    case VARIATION_DATA:
                        if (speciesHasInfoToDownload(sp, "variation")) {
                            downloadVariation(sp, spShortName, spFolder, ensemblHostUrl);
                        }
                        break;
                    case VARIATION_FUNCTIONAL_SCORE_DATA:
                        if (speciesHasInfoToDownload(sp, "variation_functional_score")) {
                            downloadCaddScores(sp, assembly.getName(), spFolder);
                        }
                        break;
                    case "regulation":
                        if (speciesHasInfoToDownload(sp, "regulation")) {
                            downloadRegulation(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
                        }
                        break;
                    case "protein":
                        if (speciesHasInfoToDownload(sp, "protein")) {
                            downloadProtein();
                        }
                        break;
                    case "conservation":
                        if (speciesHasInfoToDownload(sp, "conservation")) {
                            downloadConservation(sp, assembly.getName(), spFolder);
                        }
                        break;
                    case "clinical":
                        if (speciesHasInfoToDownload(sp, "clinical")) {
                            downloadClinical(sp, spFolder);
                        }
                        break;
                    default:
                        System.out.println("This data parameter is not allowed");
                        break;
                }
            }
        }
    }

    /**
     * Returns true if the configuration declares the given data category for this species,
     * logging a warning otherwise.
     */
    private boolean speciesHasInfoToDownload(Species sp, String info) {
        boolean hasInfo = true;
        if (sp.getData() == null || !sp.getData().contains(info)) {
            logger.warn("Species '{}' has no '{}' information available to download", sp.getScientificName(), info);
            hasInfo = false;
        }
        return hasInfo;
    }

    /**
     * Maps the species to the Ensembl Genomes phylo division used in the FTP path.
     *
     * @throws ParameterException if the species belongs to no configured division
     */
    private String getPhylo(Species sp) {
        if (configuration.getSpecies().getVertebrates().contains(sp)) {
            return "vertebrates";
        } else if (configuration.getSpecies().getMetazoa().contains(sp)) {
            return "metazoa";
        } else if (configuration.getSpecies().getFungi().contains(sp)) {
            return "fungi";
        } else if (configuration.getSpecies().getProtist().contains(sp)) {
            return "protists";
        } else if (configuration.getSpecies().getPlants().contains(sp)) {
            return "plants";
        } else {
            throw new ParameterException("Species " + sp.getScientificName()
                    + " not associated to any phylo in the configuration file");
        }
    }

    /**
     * Downloads the reference genome FASTA from Ensembl into "<species>/genome"
     * and writes a version.json with the download provenance.
     */
    private void downloadReferenceGenome(Species sp, String shortName, String assembly, Path spFolder, String host)
            throws IOException, InterruptedException {
        logger.info("Downloading genome information ...");
        Path sequenceFolder = spFolder.resolve("genome");
        makeDir(sequenceFolder);

        // Reference genome sequences are downloaded from Ensembl
        String url = host + "/" + ensemblRelease;
        if (sp.getScientificName().equals("Homo sapiens")) {
            // New Homo sapiens assemblies contain too many ALT regions,
            // so we download 'primary_assembly' file
            url = url + "/fasta/" + shortName + "/dna/*.dna.primary_assembly.fa.gz";
        } else {
            if (!configuration.getSpecies().getVertebrates().contains(sp)) {
                url = host + "/" + ensemblRelease + "/" + getPhylo(sp);
            }
            url = url + "/fasta/" + shortName + "/dna/*.dna.toplevel.fa.gz";
        }

        String outputFileName = StringUtils.capitalize(shortName) + "." + assembly + ".fa.gz";
        Path outputPath = sequenceFolder.resolve(outputFileName);
        downloadFile(url, outputPath.toString());
        logger.info("Saving reference genome version data at {}", sequenceFolder.resolve("version.json"));
        saveVersionData(GENOME_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), Collections.singletonList(url),
                sequenceFolder.resolve("version.json"));
    }

    /** Timestamp used in all the version.json provenance files. */
    private String getTimeStamp() {
        return new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime());
    }

    /**
     * Serializes the provenance of a download (source, version, date, URLs) into a JSON file.
     */
    private void saveVersionData(String data, String source, String version, String date, List<String> url,
                                 Path outputFilePath) {
        // LinkedHashMap keeps a stable, readable key order in the JSON output
        Map<String, Object> versionData = new LinkedHashMap<>();
        versionData.put("Data", data);
        versionData.put("Source", source);
        versionData.put("Version", version);
        versionData.put("Download date", date);
        versionData.put("URL", url);
        writeVersionDataFile(versionData, outputFilePath);
    }

    private void writeVersionDataFile(Map<String, Object> versionData, Path outputFilePath) {
        // try-with-resources: the writer must be closed or the buffered JSON may never be flushed to disk
        try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(Files.newOutputStream(outputFilePath)))) {
            ObjectWriter jsonObjectWriter = new ObjectMapper().writer();
            bw.write(jsonObjectWriter.writeValueAsString(versionData) + "\n");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Downloads all gene-related data: Ensembl gtf/pep/cdna/motifs, drug-gene interactions,
     * UniProt cross-references, expression atlas, disease annotation and the Perl extra-info dump.
     */
    private void downloadEnsemblGene(Species sp, String spShortName, String assembly, Path speciesFolder, String host)
            throws IOException, InterruptedException {
        logger.info("Downloading gene information ...");
        Path geneFolder = speciesFolder.resolve("gene");
        makeDir(geneFolder);

        downloadEnsemblData(sp, spShortName, geneFolder, host);
        downloadDrugData(sp, speciesFolder);
        downloadGeneUniprotXref(sp, geneFolder);
        downloadGeneExpressionAtlas();
        downloadGeneDiseaseAnnotation(geneFolder);
        runGeneExtraInfo(sp, assembly, geneFolder);
    }

    /** Downloads DGIdb drug-gene interaction data (human only). */
    private void downloadDrugData(Species species, Path speciesFolder) throws IOException, InterruptedException {
        if (species.getScientificName().equals("Homo sapiens")) {
            logger.info("Downloading drug-gene data...");
            Path geneDrugFolder = speciesFolder.resolve("gene/geneDrug");
            makeDir(geneDrugFolder);
            String url = configuration.getDownload().getDgidb().getHost();
            downloadFile(url, geneDrugFolder.resolve("dgidb.tsv").toString());
            saveVersionData(GENE_DATA, DGIDB_NAME, null, getTimeStamp(), Collections.singletonList(url),
                    geneDrugFolder.resolve("dgidbVersion.json"));
        }
    }

    /** Downloads the Ensembl gene files: GTF, peptide FASTA, cDNA FASTA and motif features. */
    private void downloadEnsemblData(Species sp, String spShortName, Path geneFolder, String host)
            throws IOException, InterruptedException {
        logger.info("Downloading gene Ensembl data (gtf, pep, cdna, motifs) ...");
        List<String> downloadedUrls = new ArrayList<>(4);

        String ensemblHost = host + "/" + ensemblRelease;
        if (!configuration.getSpecies().getVertebrates().contains(sp)) {
            ensemblHost = host + "/" + ensemblRelease + "/" + getPhylo(sp);
        }

        // Ensembl leaves now several GTF files in the FTP folder, we need to build a more accurate URL
        // to download the correct GTF file.
        String version = ensemblRelease.split("-")[1];
        String url = ensemblHost + "/gtf/" + spShortName + "/*" + version + ".gtf.gz";
        String fileName = geneFolder.resolve(spShortName + ".gtf.gz").toString();
        downloadFile(url, fileName);
        downloadedUrls.add(url);

        url = ensemblHost + "/fasta/" + spShortName + "/pep/*.pep.all.fa.gz";
        fileName = geneFolder.resolve(spShortName + ".pep.all.fa.gz").toString();
        downloadFile(url, fileName);
        downloadedUrls.add(url);

        url = ensemblHost + "/fasta/" + spShortName + "/cdna/*.cdna.all.fa.gz";
        fileName = geneFolder.resolve(spShortName + ".cdna.all.fa.gz").toString();
        downloadFile(url, fileName);
        downloadedUrls.add(url);

        url = ensemblHost + "/regulation/" + spShortName + "/MotifFeatures.gff.gz";
        Path outputFile = geneFolder.resolve("MotifFeatures.gff.gz");
        downloadFile(url, outputFile.toString());
        downloadedUrls.add(url);

        saveVersionData(GENE_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), downloadedUrls,
                geneFolder.resolve("ensemblVersion.json"));
    }

    /** Downloads the UniProt ID-mapping file for species with a known mapping file name. */
    private void downloadGeneUniprotXref(Species sp, Path geneFolder) throws IOException, InterruptedException {
        logger.info("Downloading UniProt ID mapping ...");
        if (GENE_UNIPROT_XREF_FILES.containsKey(sp.getScientificName())) {
            String geneGtfUrl = configuration.getDownload().getGeneUniprotXref().getHost() + "/"
                    + GENE_UNIPROT_XREF_FILES.get(sp.getScientificName());
            downloadFile(geneGtfUrl, geneFolder.resolve("idmapping_selected.tab.gz").toString());
            downloadFile(getUniProtReleaseNotesUrl(), geneFolder.resolve("uniprotRelnotes.txt").toString());
            saveVersionData(GENE_DATA, UNIPROT_NAME,
                    getUniProtRelease(geneFolder.resolve("uniprotRelnotes.txt").toString()),
                    getTimeStamp(), Collections.singletonList(geneGtfUrl), geneFolder.resolve("uniprotVersion.json"));
        }
    }

    /** Extracts the UniProt release token from the first line of the downloaded relnotes.txt. */
    private String getUniProtRelease(String relnotesFilename) {
        Path path = Paths.get(relnotesFilename);
        // The first line of relnotes.txt contains the UniProt release as its third whitespace token
        try (BufferedReader reader = Files.newBufferedReader(path, Charset.defaultCharset())) {
            return reader.readLine().split(" ")[2];
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    private String getUniProtReleaseNotesUrl() {
        return URI.create(configuration.getDownload().getGeneUniprotXref().getHost()).resolve("../../../").toString()
                + "/relnotes.txt";
    }

    /** Downloads the Gene Expression Atlas data into the shared 'common' folder (once). */
    private void downloadGeneExpressionAtlas() throws IOException, InterruptedException {
        logger.info("Downloading gene expression atlas ...");
        Path expression = common.resolve("expression");
        if (!Files.exists(expression)) {
            makeDir(expression);
            String geneGtfUrl = configuration.getDownload().getGeneExpressionAtlas().getHost();
            downloadFile(geneGtfUrl, expression.resolve("allgenes_updown_in_organism_part.tab.gz").toString());
            saveVersionData(GENE_DATA, GENE_EXPRESSION_ATLAS_NAME, getGeneExpressionAtlasVersion(), getTimeStamp(),
                    Collections.singletonList(geneGtfUrl), expression.resolve("geneExpressionAtlasVersion.json"));
        }
    }

    /** Derives the atlas version from the configured download file name. */
    private String getGeneExpressionAtlasVersion() {
        return FilenameUtils.getBaseName(configuration.getDownload().getGeneExpressionAtlas().getHost())
                .split("_")[5].replace(".tab.gz", "");
    }

    /** Downloads HPO and DisGeNET gene-disease annotation into the gene folder. */
    private void downloadGeneDiseaseAnnotation(Path geneFolder) throws IOException, InterruptedException {
        logger.info("Downloading gene disease annotation ...");

        String host = configuration.getDownload().getHpo().getHost();
        String fileName = StringUtils.substringAfterLast(host, "/");
        downloadFile(host, geneFolder.resolve(fileName).toString());
        saveVersionData(GENE_DATA, HPO_NAME, null, getTimeStamp(), Collections.singletonList(host),
                geneFolder.resolve("hpoVersion.json"));

        host = configuration.getDownload().getDisgenet().getHost();
        String readme = configuration.getDownload().getDisgenetReadme().getHost();
        fileName = StringUtils.substringAfterLast(host, "/");
        downloadFile(host, geneFolder.resolve(fileName).toString());
        downloadFile(readme, geneFolder.resolve("disgenetReadme.txt").toString());
        saveVersionData(GENE_DISEASE_ASSOCIATION_DATA, DISGENET_NAME,
                getDisgenetVersion(geneFolder.resolve("disgenetReadme.txt")),
                getTimeStamp(), Collections.singletonList(host), geneFolder.resolve("disgenetVersion.json"));
    }

    /**
     * Scans the DisGeNET README for the release version, e.g. the line
     * "... latest release (version 4.0, April 2016). ..." yields "version 4.0, April 2016".
     * Returns null if no such line is found or the file cannot be read.
     */
    private String getDisgenetVersion(Path path) {
        try (BufferedReader reader = Files.newBufferedReader(path, Charset.defaultCharset())) {
            String line;
            // Advance through the file; the original loop never called readLine() again (infinite loop)
            // and used matches(), which requires a full-line match and could never succeed.
            while ((line = reader.readLine()) != null) {
                if (line.contains("(version")) {
                    return line.split("\\(")[1].split("\\)")[0];
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Runs the gene_extra_info.pl Ensembl Perl script to dump additional gene data. */
    private void runGeneExtraInfo(Species sp, String assembly, Path geneFolder)
            throws IOException, InterruptedException {
        logger.info("Downloading gene extra info ...");
        String geneExtraInfoLogFile = geneFolder.resolve("gene_extra_info.log").toString();
        List<String> args = new ArrayList<>();
        if (sp.getScientificName().equals("Homo sapiens") && assembly.equalsIgnoreCase("GRCh37")) {
            // GRCh37 requires the older Ensembl API libraries (75 instead of 79)
            args.addAll(Arrays.asList("--species", sp.getScientificName(),
                    "--outdir", geneFolder.toAbsolutePath().toString(),
                    "--ensembl-libs", configuration.getDownload().getEnsembl().getLibs().replace("79", "75")));
        } else {
            args.addAll(Arrays.asList("--species", sp.getScientificName(),
                    "--outdir", geneFolder.toAbsolutePath().toString(),
                    "--ensembl-libs", configuration.getDownload().getEnsembl().getLibs()));
        }
        if (!configuration.getSpecies().getVertebrates().contains(species)
                && !species.getScientificName().equals("Drosophila melanogaster")) {
            args.add("--phylo");
            args.add("no-vertebrate");
        }

        // run gene_extra_info.pl
        boolean geneExtraInfoDownloaded = runCommandLineProcess(ensemblScriptsFolder, "./gene_extra_info.pl", args,
                geneExtraInfoLogFile);

        // check output
        if (geneExtraInfoDownloaded) {
            logger.info("Gene extra files created OK");
        } else {
            logger.error("Gene extra info for " + sp.getScientificName() + " cannot be downloaded");
        }
    }

    /** Downloads the Ensembl variation MySQL dump tables listed in VARIATION_FILES. */
    private void downloadVariation(Species sp, String shortName, Path spFolder, String host)
            throws IOException, InterruptedException {
        logger.info("Downloading variation information ...");
        Path variationFolder = spFolder.resolve("variation");
        makeDir(variationFolder);

        String variationUrl = host + "/" + ensemblRelease;
        if (!configuration.getSpecies().getVertebrates().contains(sp)) {
            variationUrl = host + "/" + ensemblRelease + "/" + getPhylo(sp);
        }
        variationUrl = variationUrl + "/mysql/" + shortName + "_variation_" + ensemblVersion;

        List<String> downloadedUrls = new ArrayList<>(VARIATION_FILES.length);
        for (String variationFile : VARIATION_FILES) {
            Path outputFile = variationFolder.resolve(variationFile);
            downloadFile(variationUrl + "/" + variationFile, outputFile.toString());
            downloadedUrls.add(variationUrl + "/" + variationFile);
        }
        saveVersionData(VARIATION_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), downloadedUrls,
                variationFolder.resolve("ensemblVersion.json"));
    }

    /**
     * Downloads Ensembl regulation data, the shared miRBase files, and (for human/mouse)
     * TargetScan and miRTarBase target predictions.
     */
    private void downloadRegulation(Species species, String shortName, String assembly, Path speciesFolder,
                                    String host) throws IOException, InterruptedException {
        logger.info("Downloading regulation information ...");
        Path regulationFolder = speciesFolder.resolve("regulation");
        makeDir(regulationFolder);

        // Downloading Ensembl Regulation
        String regulationUrl = host + "/" + ensemblRelease;
        if (!configuration.getSpecies().getVertebrates().contains(species)) {
            regulationUrl = host + "/" + ensemblRelease + "/" + getPhylo(species);
        }
        regulationUrl = regulationUrl + "/regulation/" + shortName;
        for (String regulationFile : REGULATION_FILES) {
            Path outputFile = regulationFolder.resolve(regulationFile);
            downloadFile(regulationUrl + "/" + regulationFile, outputFile.toString());
        }

        // Downloading miRNA info (shared across species, downloaded once)
        String url;
        Path mirbaseFolder = common.resolve("mirbase");
        if (!Files.exists(mirbaseFolder)) {
            makeDir(mirbaseFolder);
            url = configuration.getDownload().getMirbase().getHost() + "/miRNA.xls.gz";
            downloadFile(url, mirbaseFolder.resolve("miRNA.xls.gz").toString());
            url = configuration.getDownload().getMirbase().getHost() + "/aliases.txt.gz";
            downloadFile(url, mirbaseFolder.resolve("aliases.txt.gz").toString());
        }
        if (species.getScientificName().equals("Homo sapiens")) {
            if (assembly.equalsIgnoreCase("GRCh37")) {
                url = configuration.getDownload().getTargetScan().getHost() + "/hg19/database/targetScanS.txt.gz";
                downloadFile(url, regulationFolder.resolve("targetScanS.txt.gz").toString());
                url = configuration.getDownload().getMiRTarBase().getHost() + "/hsa_MTI.xls";
                downloadFile(url, regulationFolder.resolve("hsa_MTI.xls").toString());
            }
        }
        if (species.getScientificName().equals("Mus musculus")) {
            url = configuration.getDownload().getTargetScan().getHost() + "/mm9/database/targetScanS.txt.gz";
            downloadFile(url, regulationFolder.resolve("targetScanS.txt.gz").toString());
            url = configuration.getDownload().getMiRTarBase().getHost() + "/mmu_MTI.xls";
            downloadFile(url, regulationFolder.resolve("mmu_MTI.xls").toString());
        }
    }

    /**
     * This method downloads UniProt, IntAct and Interpro data from EMBL-EBI.
     * Data is shared across species and downloaded only once into the 'common' folder.
     *
     * @throws IOException if a file cannot be written
     * @throws InterruptedException if the wget subprocess is interrupted
     */
    private void downloadProtein() throws IOException, InterruptedException {
        logger.info("Downloading protein information ...");
        Path proteinFolder = common.resolve("protein");
        if (!Files.exists(proteinFolder)) {
            makeDir(proteinFolder);
            String url = configuration.getDownload().getUniprot().getHost();
            downloadFile(url, proteinFolder.resolve("uniprot_sprot.xml.gz").toString());
            makeDir(proteinFolder.resolve("uniprot_chunks"));
            splitUniprot(proteinFolder.resolve("uniprot_sprot.xml.gz"), proteinFolder.resolve("uniprot_chunks"));
            url = configuration.getDownload().getIntact().getHost();
            downloadFile(url, proteinFolder.resolve("intact.txt").toString());
            url = configuration.getDownload().getInterpro().getHost();
            downloadFile(url, proteinFolder.resolve("protein2ipr.dat.gz").toString());
        } else {
            logger.info("Protein: skipping this since it is already downloaded. Delete 'protein' folder to force download");
        }
    }

    /**
     * Splits the (gzipped) UniProt XML into chunk files of 10000 &lt;entry&gt; elements each,
     * repeating the XML header in every chunk and closing each with "&lt;/uniprot&gt;".
     */
    private void splitUniprot(Path uniprotFilePath, Path splitOutdirPath) throws IOException {
        BufferedReader br = FileUtils.newBufferedReader(uniprotFilePath);
        PrintWriter pw = null;
        StringBuilder header = new StringBuilder();
        boolean beforeEntry = true;
        boolean inEntry = false;
        int count = 0;
        int chunk = 0;
        String line;
        try {
            while ((line = br.readLine()) != null) {
                if (line.trim().startsWith("<entry ")) {
                    inEntry = true;
                    beforeEntry = false;
                    if (count % 10000 == 0) {
                        pw = new PrintWriter(
                                new FileOutputStream(splitOutdirPath.resolve("chunk_" + chunk + ".xml").toFile()));
                        pw.println(header.toString().trim());
                    }
                    count++;
                }
                if (beforeEntry) {
                    header.append(line).append("\n");
                }
                if (inEntry) {
                    pw.println(line);
                }
                if (line.trim().startsWith("</entry>")) {
                    inEntry = false;
                    if (count % 10000 == 0) {
                        pw.print("</uniprot>");
                        pw.close();
                        // mark the chunk as closed so the footer is not written twice below
                        pw = null;
                        chunk++;
                    }
                }
            }
            // Close the last, partially-filled chunk. Guard against the input containing
            // no entries at all (pw would still be null) or an exact multiple of 10000
            // entries (last chunk already closed above).
            if (pw != null) {
                pw.print("</uniprot>");
                pw.close();
            }
        } finally {
            br.close();
        }
    }

    /**
     * This method downloads both PhastCons and PhyloP data from UCSC for Human and Mouse species,
     * plus GERP++ for human GRCh37.
     *
     * @param species The Species object to download the data
     * @param assembly The assembly required
     * @param speciesFolder Output folder to download the data
     * @throws IOException if a file cannot be written
     * @throws InterruptedException if the wget subprocess is interrupted
     */
    private void downloadConservation(Species species, String assembly, Path speciesFolder)
            throws IOException, InterruptedException {
        logger.info("Downloading conservation information ...");
        Path conservationFolder = speciesFolder.resolve("conservation");
        if (species.getScientificName().equals("Homo sapiens")) {
            makeDir(conservationFolder);
            makeDir(conservationFolder.resolve("phastCons"));
            makeDir(conservationFolder.resolve("phylop"));
            makeDir(conservationFolder.resolve("gerp"));

            String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14",
                    "15", "16", "17", "18", "19", "20", "21", "22", "X", "Y", "M", };

            if (assembly.equalsIgnoreCase("GRCh37")) {
                logger.debug("Downloading GERP++ ...");
                downloadFile(configuration.getDownload().getGerp().getHost(),
                        conservationFolder.resolve("gerp/hg19.GERP_scores.tar.gz").toAbsolutePath().toString());

                String url = configuration.getDownload().getConservation().getHost() + "/hg19";
                for (int i = 0; i < chromosomes.length; i++) {
                    String phastConsUrl = url + "/phastCons46way/primates/chr" + chromosomes[i]
                            + ".phastCons46way.primates.wigFix.gz";
                    downloadFile(phastConsUrl, conservationFolder.resolve("phastCons")
                            .resolve("chr" + chromosomes[i] + ".phastCons46way.primates.wigFix.gz").toString());
                    String phyloPUrl = url + "/phyloP46way/primates/chr" + chromosomes[i]
                            + ".phyloP46way.primate.wigFix.gz";
                    downloadFile(phyloPUrl, conservationFolder.resolve("phylop")
                            .resolve("chr" + chromosomes[i] + ".phyloP46way.primate.wigFix.gz").toString());
                }
            }

            if (assembly.equalsIgnoreCase("GRCh38")) {
                String url = configuration.getDownload().getConservation().getHost() + "/hg38";
                for (int i = 0; i < chromosomes.length; i++) {
                    String phastConsUrl = url + "/phastCons100way/hg38.100way.phastCons/chr" + chromosomes[i]
                            + ".phastCons100way.wigFix.gz";
                    downloadFile(phastConsUrl, conservationFolder.resolve("phastCons")
                            .resolve("chr" + chromosomes[i] + ".phastCons100way.wigFix.gz").toString());
                    String phyloPUrl = url + "/phyloP100way/hg38.100way.phyloP100way/chr" + chromosomes[i]
                            + ".phyloP100way.wigFix.gz";
                    downloadFile(phyloPUrl, conservationFolder.resolve("phylop")
                            .resolve("chr" + chromosomes[i] + ".phyloP100way.wigFix.gz").toString());
                }
            }
        }

        if (species.getScientificName().equals("Mus musculus")) {
            makeDir(conservationFolder);
            makeDir(conservationFolder.resolve("phastCons"));
            makeDir(conservationFolder.resolve("phylop"));

            String url = configuration.getDownload().getConservation().getHost() + "/mm10";
            String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14",
                    "15", "16", "17", "18", "19", "X", "Y", "M", };
            for (int i = 0; i < chromosomes.length; i++) {
                String phastConsUrl = url + "/phastCons60way/mm10.60way.phastCons/chr" + chromosomes[i]
                        + ".phastCons60way.wigFix.gz";
                downloadFile(phastConsUrl, conservationFolder.resolve("phastCons")
                        .resolve("chr" + chromosomes[i] + ".phastCons60way.wigFix.gz").toString());
                String phyloPUrl = url + "/phyloP60way/mm10.60way.phyloP60way/chr" + chromosomes[i]
                        + ".phyloP60way.wigFix.gz";
                downloadFile(phyloPUrl, conservationFolder.resolve("phylop")
                        .resolve("chr" + chromosomes[i] + ".phyloP60way.wigFix.gz").toString());
            }
        }
    }

    /** Downloads ClinVar, GWAS catalog and dbSNP clinical data (human only). */
    private void downloadClinical(Species species, Path speciesFolder) throws IOException, InterruptedException {
        if (species.getScientificName().equals("Homo sapiens")) {
            logger.info("Downloading clinical information ...");
            Path clinicalFolder = speciesFolder.resolve("clinical");
            makeDir(clinicalFolder);
            String url = configuration.getDownload().getClinvar().getHost();
            downloadFile(url, clinicalFolder.resolve("ClinVar.xml.gz").toString());
            url = configuration.getDownload().getClinvarEfoTerms().getHost();
            downloadFile(url, clinicalFolder.resolve("ClinVar_Traits_EFO_Names.csv").toString());
            url = configuration.getDownload().getClinvarSummary().getHost();
            downloadFile(url, clinicalFolder.resolve("variant_summary.txt.gz").toString());
            url = configuration.getDownload().getGwasCatalog().getHost();
            downloadFile(url, clinicalFolder.resolve("gwas_catalog.tsv").toString());
            url = configuration.getDownload().getDbsnp().getHost();
            downloadFile(url, clinicalFolder.resolve("All.vcf.gz").toString());
            // tabix index of the dbSNP VCF
            url = url + ".tbi";
            downloadFile(url, clinicalFolder.resolve("All.vcf.gz.tbi").toString());
        }
    }

    /** Downloads DisGeNET and HPO gene-disease association files (human only). */
    private void downloadGeneDiseaseAssociation(Species species, Path speciesFolder)
            throws IOException, InterruptedException {
        if (species.getScientificName().equals("Homo sapiens")) {
            logger.info("Downloading gene to disease information ...");
            Path gene2diseaseFolder = speciesFolder.resolve("gene_disease_association");
            makeDir(gene2diseaseFolder);

            // Downloads DisGeNET
            String url = configuration.getDownload().getDisgenet().getHost();
            String readmeUrl = configuration.getDownload().getDisgenetReadme().getHost();
            downloadFile(url, gene2diseaseFolder.resolve("all_gene_disease_associations.txt.gz").toString());
            downloadFile(readmeUrl, gene2diseaseFolder.resolve("disgenetReadme.txt").toString());
            saveVersionData(GENE_DISEASE_ASSOCIATION_DATA, DISGENET_NAME,
                    getDisgenetVersion(gene2diseaseFolder.resolve("disgenetReadme.txt")),
                    getTimeStamp(), Collections.singletonList(url), gene2diseaseFolder.resolve("disgenetVersion.json"));

            // Downloads HPO
            url = configuration.getDownload().getHpo().getHost();
            downloadFile(url, gene2diseaseFolder
                    .resolve("ALL_SOURCES_ALL_FREQUENCIES_diseases_to_genes_to_phenotypes.txt").toString());
            saveVersionData(GENE_DISEASE_ASSOCIATION_DATA, HPO_NAME, null, getTimeStamp(),
                    Collections.singletonList(url), gene2diseaseFolder.resolve("hpoVersion.json"));
        }
    }

    /** Downloads CADD functional scores (human GRCh37 only). */
    private void downloadCaddScores(Species species, String assembly, Path speciesFolder)
            throws IOException, InterruptedException {
        if (species.getScientificName().equals("Homo sapiens") && assembly.equalsIgnoreCase("GRCh37")) {
            logger.info("Downloading CADD scores information ...");
            Path variationFunctionalScoreFolder = speciesFolder.resolve("variation_functional_score");
            makeDir(variationFunctionalScoreFolder);

            // Downloads CADD scores; the version is encoded in the 6th path segment of the URL
            String url = configuration.getDownload().getCadd().getHost();
            downloadFile(url, variationFunctionalScoreFolder.resolve("whole_genome_SNVs.tsv.gz").toString());
            saveVersionData(VARIATION_FUNCTIONAL_SCORE_DATA, CADD_NAME, url.split("/")[5], getTimeStamp(),
                    Collections.singletonList(url), variationFunctionalScoreFolder.resolve("caddVersion.json"));
        }
    }

    /**
     * Downloads a URL to the given file via wget (10 retries), logging wget output
     * to "&lt;outputFileName&gt;.log".
     */
    private void downloadFile(String url, String outputFileName) throws IOException, InterruptedException {
        List<String> wgetArgs = Arrays.asList("--tries=10", url, "-O", outputFileName, "-o", outputFileName + ".log");
        boolean downloaded = runCommandLineProcess(null, "wget", wgetArgs, null);
        if (downloaded) {
            logger.info(outputFileName + " created OK");
        } else {
            logger.warn(url + " cannot be downloaded");
        }
    }

}
cellbase-app/src/main/java/org/opencb/cellbase/app/cli/DownloadCommandExecutor.java
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.cellbase.app.cli; import com.beust.jcommander.ParameterException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.commons.collections.map.HashedMap; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.StringUtils; import org.opencb.cellbase.core.CellBaseConfiguration.SpeciesProperties.Species; import org.opencb.commons.utils.FileUtils; import java.io.*; import java.net.URI; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.*; /** * Created by imedina on 03/02/15. 
*/ public class DownloadCommandExecutor extends CommandExecutor { private CliOptionsParser.DownloadCommandOptions downloadCommandOptions; private Path output = null; private Path common = null; private File ensemblScriptsFolder; private String ensemblVersion; private String ensemblRelease; private Species species; private static final String[] VARIATION_FILES = {"variation.txt.gz", "variation_feature.txt.gz", "transcript_variation.txt.gz", "variation_synonym.txt.gz", "seq_region.txt.gz", "source.txt.gz", "attrib.txt.gz", "attrib_type.txt.gz", "seq_region.txt.gz", "structural_variation_feature.txt.gz", "study.txt.gz", "phenotype.txt.gz", "phenotype_feature.txt.gz", "phenotype_feature_attrib.txt.gz", "motif_feature_variation.txt.gz", "genotype_code.txt.gz", "allele_code.txt.gz", "population_genotype.txt.gz", "population.txt.gz", "allele.txt.gz", }; private static final String[] REGULATION_FILES = {"AnnotatedFeatures.gff.gz", "MotifFeatures.gff.gz", "RegulatoryFeatures_MultiCell.gff.gz", }; private static final Map<String, String> GENE_UNIPROT_XREF_FILES = new HashMap() { { put("Homo sapiens", "HUMAN_9606_idmapping_selected.tab.gz"); put("Mus musculus", "MOUSE_10090_idmapping_selected.tab.gz"); put("Rattus norvegicus", "RAT_10116_idmapping_selected.tab.gz"); put("Danio rerio", "DANRE_7955_idmapping_selected.tab.gz"); put("Drosophila melanogaster", "DROME_7227_idmapping_selected.tab.gz"); put("Saccharomyces cerevisiae", "YEAST_559292_idmapping_selected.tab.gz"); } }; private static final String ENSEMBL_NAME = "ENSEMBL"; private static final String GENE_EXPRESSION_ATLAS_NAME = "Gene Expression Atlas"; private static final String HPO_NAME = "HPO"; private static final String DISGENET_NAME = "DisGeNET"; private static final String DGIDB_NAME = "DGIdb"; private static final String UNIPROT_NAME = "DGIdb"; private static final String GENOME_DATA = "genome"; private static final String GENE_DATA = "gene"; private static final String GENE_DISEASE_ASSOCIATION_DATA = 
"gene_disease_association"; public DownloadCommandExecutor(CliOptionsParser.DownloadCommandOptions downloadCommandOptions) { super(downloadCommandOptions.commonOptions.logLevel, downloadCommandOptions.commonOptions.verbose, downloadCommandOptions.commonOptions.conf); this.downloadCommandOptions = downloadCommandOptions; if (downloadCommandOptions.output != null) { output = Paths.get(downloadCommandOptions.output); } if (downloadCommandOptions.common != null) { common = Paths.get(downloadCommandOptions.common); } else { common = output.resolve("common"); } this.ensemblScriptsFolder = new File(System.getProperty("basedir") + "/bin/ensembl-scripts/"); } /** * Execute specific 'download' command options. */ public void execute() { try { if (downloadCommandOptions.species != null && !downloadCommandOptions.species.isEmpty()) { // We need to get the Species object from the CLI name // This can be the scientific or common name, or the ID // Species speciesToDownload = null; for (Species sp : configuration.getAllSpecies()) { if (downloadCommandOptions.species.equalsIgnoreCase(sp.getScientificName()) || downloadCommandOptions.species.equalsIgnoreCase(sp.getCommonName()) || downloadCommandOptions.species.equalsIgnoreCase(sp.getId())) { species = sp; break; } } // If everything is right we launch the download if (species != null) { processSpecies(species); } else { logger.error("Species '{}' not valid", downloadCommandOptions.species); } } else { logger.error("--species parameter '{}' not valid", downloadCommandOptions.species); } } catch (ParameterException e) { logger.error("Error in 'download' command line: " + e.getMessage()); } catch (IOException | InterruptedException e) { logger.error("Error downloading '" + downloadCommandOptions.species + "' files: " + e.getMessage()); } } private void processSpecies(Species sp) throws IOException, InterruptedException { logger.info("Processing species " + sp.getScientificName()); // We need to find which is the correct Ensembl host 
URL. // This can different depending on if is a vertebrate species. String ensemblHostUrl; if (configuration.getSpecies().getVertebrates().contains(sp)) { ensemblHostUrl = configuration.getDownload().getEnsembl().getUrl().getHost(); } else { ensemblHostUrl = configuration.getDownload().getEnsemblGenomes().getUrl().getHost(); } // Getting the assembly. // By default the first assembly in the configuration.json Species.Assembly assembly = null; if (downloadCommandOptions.assembly == null || downloadCommandOptions.assembly.isEmpty()) { assembly = sp.getAssemblies().get(0); } else { for (Species.Assembly assembly1 : sp.getAssemblies()) { if (downloadCommandOptions.assembly.equalsIgnoreCase(assembly1.getName())) { assembly = assembly1; break; } } } // Checking that the species and assembly are correct if (ensemblHostUrl == null || assembly == null) { logger.error("Something is not correct, check the species '{}' or the assembly '{}'", downloadCommandOptions.species, downloadCommandOptions.assembly); return; } // Output folder creation String spShortName = sp.getScientificName().toLowerCase() .replaceAll("\\.", "") .replaceAll("\\)", "") .replaceAll("\\(", "") .replaceAll("[-/]", " ") .replaceAll("\\s+", "_"); String spAssembly = assembly.getName().toLowerCase(); Path spFolder = output.resolve(spShortName + "_" + spAssembly); makeDir(spFolder); makeDir(common); ensemblVersion = assembly.getEnsemblVersion(); ensemblRelease = "release-" + ensemblVersion.split("_")[0]; if (downloadCommandOptions.data != null && !downloadCommandOptions.data.isEmpty()) { List<String> dataList; if (downloadCommandOptions.data.equals("all")) { dataList = sp.getData(); } else { dataList = Arrays.asList(downloadCommandOptions.data.split(",")); } for (String data : dataList) { switch (data) { case GENOME_DATA: downloadReferenceGenome(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl); break; case GENE_DATA: downloadEnsemblGene(sp, spShortName, assembly.getName(), spFolder, 
ensemblHostUrl); break; case GENE_DISEASE_ASSOCIATION_DATA: if (speciesHasInfoToDownload(sp, "gene_disease_association")) { downloadGeneDiseaseAssociation(sp, spFolder); } break; case "variation": if (speciesHasInfoToDownload(sp, "variation")) { downloadVariation(sp, spShortName, spFolder, ensemblHostUrl); } break; case "variation_functional_score": if (speciesHasInfoToDownload(sp, "variation_functional_score")) { downloadCaddScores(sp, assembly.getName(), spFolder); } break; case "regulation": if (speciesHasInfoToDownload(sp, "regulation")) { downloadRegulation(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl); } break; case "protein": if (speciesHasInfoToDownload(sp, "protein")) { downloadProtein(); } break; case "conservation": if (speciesHasInfoToDownload(sp, "conservation")) { downloadConservation(sp, assembly.getName(), spFolder); } break; case "clinical": if (speciesHasInfoToDownload(sp, "clinical")) { downloadClinical(sp, spFolder); } break; default: System.out.println("This data parameter is not allowed"); break; } } } } private boolean speciesHasInfoToDownload(Species sp, String info) { boolean hasInfo = true; if (sp.getData() == null || !sp.getData().contains(info)) { logger.warn("Species '{}' has no '{}' information available to download", sp.getScientificName(), info); hasInfo = false; } return hasInfo; } private String getPhylo(Species sp) { if (configuration.getSpecies().getVertebrates().contains(sp)) { return "vertebrates"; } else if (configuration.getSpecies().getMetazoa().contains(sp)) { return "metazoa"; } else if (configuration.getSpecies().getFungi().contains(sp)) { return "fungi"; } else if (configuration.getSpecies().getProtist().contains(sp)) { return "protists"; } else if (configuration.getSpecies().getPlants().contains(sp)) { return "plants"; } else { throw new ParameterException("Species " + sp.getScientificName() + " not associated to any phylo in the configuration file"); } } private void downloadReferenceGenome(Species sp, 
String shortName, String assembly, Path spFolder, String host) throws IOException, InterruptedException { logger.info("Downloading genome information ..."); Path sequenceFolder = spFolder.resolve("genome"); makeDir(sequenceFolder); /** * Reference genome sequences are downloaded from Ensembl */ String url = host + "/" + ensemblRelease; if (sp.getScientificName().equals("Homo sapiens")) { // New Homo sapiens assemblies contain too many ALT regions, // so we download 'primary_assembly' file url = url + "/fasta/" + shortName + "/dna/*.dna.primary_assembly.fa.gz"; } else { if (!configuration.getSpecies().getVertebrates().contains(sp)) { url = host + "/" + ensemblRelease + "/" + getPhylo(sp); } url = url + "/fasta/" + shortName + "/dna/*.dna.toplevel.fa.gz"; } String outputFileName = StringUtils.capitalize(shortName) + "." + assembly + ".fa.gz"; Path outputPath = sequenceFolder.resolve(outputFileName); downloadFile(url, outputPath.toString()); logger.info("Saving reference genome version data at {}", sequenceFolder.resolve("version.json")); saveVersionData(GENOME_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), Collections.singletonList(url), sequenceFolder.resolve("version.json")); } private String getTimeStamp() { return new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime()); } private void saveVersionData(String data, String source, String version, String date, List<String> url, Path outputFilePath) { Map versionData = new HashedMap(); versionData.put("Data", data); versionData.put("Source", source); versionData.put("Version", version); versionData.put("Download date", date); versionData.put("URL", url); writeVersionDataFile(versionData, outputFilePath); } private void writeVersionDataFile(Map versionData, Path outputFilePath) { try { OutputStream os = Files.newOutputStream(outputFilePath); BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(os)); ObjectMapper jsonObjectMapper = new ObjectMapper(); ObjectWriter jsonObjectWriter 
= jsonObjectMapper.writer(); bw.write(jsonObjectWriter.writeValueAsString(versionData) + "\n"); } catch (IOException e) { e.printStackTrace(); } } private void downloadEnsemblGene(Species sp, String spShortName, String assembly, Path speciesFolder, String host) throws IOException, InterruptedException { logger.info("Downloading gene information ..."); Path geneFolder = speciesFolder.resolve("gene"); makeDir(geneFolder); downloadEnsemblData(sp, spShortName, geneFolder, host); downloadDrugData(sp, speciesFolder); downloadGeneUniprotXref(sp, geneFolder); downloadGeneExpressionAtlas(); downloadGeneDiseaseAnnotation(geneFolder); runGeneExtraInfo(sp, assembly, geneFolder); } private void downloadDrugData(Species species, Path speciesFolder) throws IOException, InterruptedException { if (species.getScientificName().equals("Homo sapiens")) { logger.info("Downloading drug-gene data..."); Path geneDrugFolder = speciesFolder.resolve("gene/geneDrug"); makeDir(geneDrugFolder); String url = configuration.getDownload().getDgidb().getHost(); downloadFile(url, geneDrugFolder.resolve("dgidb.tsv").toString()); saveVersionData(GENE_DATA, DGIDB_NAME, null, getTimeStamp(), Collections.singletonList(url), geneDrugFolder.resolve("dgidbVersion.json")); } } private void downloadEnsemblData(Species sp, String spShortName, Path geneFolder, String host) throws IOException, InterruptedException { logger.info("Downloading gene Ensembl data (gtf, pep, cdna, motifs) ..."); List<String> downloadedUrls = new ArrayList<>(4); String ensemblHost = host + "/" + ensemblRelease; if (!configuration.getSpecies().getVertebrates().contains(sp)) { ensemblHost = host + "/" + ensemblRelease + "/" + getPhylo(sp); } // Ensembl leaves now several GTF files in the FTP folder, we need to build a more accurate URL // to download the correct GTF file. 
String version = ensemblRelease.split("-")[1]; String url = ensemblHost + "/gtf/" + spShortName + "/*" + version + ".gtf.gz"; String fileName = geneFolder.resolve(spShortName + ".gtf.gz").toString(); downloadFile(url, fileName); downloadedUrls.add(url); url = ensemblHost + "/fasta/" + spShortName + "/pep/*.pep.all.fa.gz"; fileName = geneFolder.resolve(spShortName + ".pep.all.fa.gz").toString(); downloadFile(url, fileName); downloadedUrls.add(url); url = ensemblHost + "/fasta/" + spShortName + "/cdna/*.cdna.all.fa.gz"; fileName = geneFolder.resolve(spShortName + ".cdna.all.fa.gz").toString(); downloadFile(url, fileName); downloadedUrls.add(url); url = ensemblHost + "/regulation/" + spShortName + "/MotifFeatures.gff.gz"; Path outputFile = geneFolder.resolve("MotifFeatures.gff.gz"); downloadFile(url, outputFile.toString()); downloadedUrls.add(url); saveVersionData(GENE_DATA, ENSEMBL_NAME, ensemblVersion, getTimeStamp(), downloadedUrls, geneFolder.resolve("ensemblVersion.json")); } private void downloadGeneUniprotXref(Species sp, Path geneFolder) throws IOException, InterruptedException { logger.info("Downloading UniProt ID mapping ..."); if (GENE_UNIPROT_XREF_FILES.containsKey(sp.getScientificName())) { String geneGtfUrl = configuration.getDownload().getGeneUniprotXref().getHost() + "/" + GENE_UNIPROT_XREF_FILES.get(sp.getScientificName()); downloadFile(geneGtfUrl, geneFolder.resolve("idmapping_selected.tab.gz").toString()); downloadFile(getUniProtReleaseNotesUrl(), geneFolder.resolve("uniprotRelnotes.txt").toString()); saveVersionData(GENE_DATA, UNIPROT_NAME, getUniProtRelease(geneFolder.resolve("uniprotRelnotes.txt").toString()), getTimeStamp(), Collections.singletonList(geneGtfUrl), geneFolder.resolve("uniprotVersion.json")); } } private String getUniProtRelease(String relnotesFilename) { Path path = Paths.get(relnotesFilename); Files.exists(path); try { // The first line at the relnotes.txt file contains the UniProt release BufferedReader reader = 
Files.newBufferedReader(path, Charset.defaultCharset()); String release = reader.readLine().split(" ")[2]; reader.close(); return release; } catch (IOException e) { e.printStackTrace(); } return null; } private String getUniProtReleaseNotesUrl() { return URI.create(configuration.getDownload().getGeneUniprotXref().getHost()).resolve("../../../").toString() + "/relnotes.txt"; } private void downloadGeneExpressionAtlas() throws IOException, InterruptedException { logger.info("Downloading gene expression atlas ..."); // Path expression = geneFolder.getParent().resolve("common").resolve("expression"); Path expression = common.resolve("expression"); if (!Files.exists(expression)) { makeDir(expression); String geneGtfUrl = configuration.getDownload().getGeneExpressionAtlas().getHost(); downloadFile(geneGtfUrl, expression.resolve("allgenes_updown_in_organism_part.tab.gz").toString()); saveVersionData(GENE_DATA, GENE_EXPRESSION_ATLAS_NAME, getGeneExpressionAtlasVersion(), getTimeStamp(), Collections.singletonList(geneGtfUrl), expression.resolve("geneExpressionAtlasVersion.json")); } } private String getGeneExpressionAtlasVersion() { return FilenameUtils.getBaseName(configuration.getDownload().getGeneExpressionAtlas().getHost()) .split("_")[5].replace(".tab.gz", ""); } private void downloadGeneDiseaseAnnotation(Path geneFolder) throws IOException, InterruptedException { logger.info("Downloading gene disease annotation ..."); String host = configuration.getDownload().getHpo().getHost(); String fileName = StringUtils.substringAfterLast(host, "/"); downloadFile(host, geneFolder.resolve(fileName).toString()); saveVersionData(GENE_DATA, HPO_NAME, null, getTimeStamp(), Collections.singletonList(host), geneFolder.resolve("hpoVersion.json")); host = configuration.getDownload().getDisgenet().getHost(); String readme = configuration.getDownload().getDisgenetReadme().getHost(); fileName = StringUtils.substringAfterLast(host, "/"); downloadFile(host, 
geneFolder.resolve(fileName).toString()); downloadFile(readme, geneFolder.resolve("disgenetReadme.txt").toString()); saveVersionData(GENE_DISEASE_ASSOCIATION_DATA, DISGENET_NAME, getDisgenetVersion(geneFolder.resolve("disgenetReadme.txt")), getTimeStamp(), Collections.singletonList(host), geneFolder.resolve("disgenetVersion.json")); } private String getDisgenetVersion(Path path) { Files.exists(path); try { BufferedReader reader = Files.newBufferedReader(path, Charset.defaultCharset()); String line = reader.readLine(); // There shall be a line at the README.txt containing the version. // e.g. The files in the current directory contain the data corresponding to the latest release (version 4.0, April 2016). ... while (line != null) { if (line.matches("\\(version")) { return line.split("\\(")[1].split("\\)")[0]; } } } catch (IOException e) { e.printStackTrace(); } return null; } private void runGeneExtraInfo(Species sp, String assembly, Path geneFolder) throws IOException, InterruptedException { logger.info("Downloading gene extra info ..."); String geneExtraInfoLogFile = geneFolder.resolve("gene_extra_info.log").toString(); List<String> args = new ArrayList<>(); if (sp.getScientificName().equals("Homo sapiens") && assembly.equalsIgnoreCase("GRCh37")) { args.addAll(Arrays.asList("--species", sp.getScientificName(), "--outdir", geneFolder.toAbsolutePath().toString(), "--ensembl-libs", configuration.getDownload().getEnsembl().getLibs() .replace("79", "75"))); } else { args.addAll(Arrays.asList("--species", sp.getScientificName(), "--outdir", geneFolder.toAbsolutePath().toString(), "--ensembl-libs", configuration.getDownload().getEnsembl().getLibs())); } if (!configuration.getSpecies().getVertebrates().contains(species) && !species.getScientificName().equals("Drosophila melanogaster")) { args.add("--phylo"); args.add("no-vertebrate"); } // run gene_extra_info.pl boolean geneExtraInfoDownloaded = runCommandLineProcess(ensemblScriptsFolder, "./gene_extra_info.pl", args, 
geneExtraInfoLogFile); // check output if (geneExtraInfoDownloaded) { logger.info("Gene extra files created OK"); } else { logger.error("Gene extra info for " + sp.getScientificName() + " cannot be downloaded"); } } private void downloadVariation(Species sp, String shortName, Path spFolder, String host) throws IOException, InterruptedException { logger.info("Downloading variation information ..."); Path variationFolder = spFolder.resolve("variation"); makeDir(variationFolder); String variationUrl = host + "/" + ensemblRelease; if (!configuration.getSpecies().getVertebrates().contains(sp)) { variationUrl = host + "/" + ensemblRelease + "/" + getPhylo(sp); } variationUrl = variationUrl + "/mysql/" + shortName + "_variation_" + ensemblVersion; for (String variationFile : VARIATION_FILES) { Path outputFile = variationFolder.resolve(variationFile); downloadFile(variationUrl + "/" + variationFile, outputFile.toString()); } } private void downloadRegulation(Species species, String shortName, String assembly, Path speciesFolder, String host) throws IOException, InterruptedException { logger.info("Downloading regulation information ..."); Path regulationFolder = speciesFolder.resolve("regulation"); makeDir(regulationFolder); // Downloading Ensembl Regulation String regulationUrl = host + "/" + ensemblRelease; if (!configuration.getSpecies().getVertebrates().contains(species)) { regulationUrl = host + "/" + ensemblRelease + "/" + getPhylo(species); } regulationUrl = regulationUrl + "/regulation/" + shortName; for (String regulationFile : REGULATION_FILES) { Path outputFile = regulationFolder.resolve(regulationFile); downloadFile(regulationUrl + "/" + regulationFile, outputFile.toString()); } // Downloading miRNA info String url; Path mirbaseFolder = common.resolve("mirbase"); if (!Files.exists(mirbaseFolder)) { makeDir(mirbaseFolder); url = configuration.getDownload().getMirbase().getHost() + "/miRNA.xls.gz"; downloadFile(url, 
mirbaseFolder.resolve("miRNA.xls.gz").toString()); url = configuration.getDownload().getMirbase().getHost() + "/aliases.txt.gz"; downloadFile(url, mirbaseFolder.resolve("aliases.txt.gz").toString()); } if (species.getScientificName().equals("Homo sapiens")) { if (assembly.equalsIgnoreCase("GRCh37")) { url = configuration.getDownload().getTargetScan().getHost() + "/hg19/database/targetScanS.txt.gz"; downloadFile(url, regulationFolder.resolve("targetScanS.txt.gz").toString()); url = configuration.getDownload().getMiRTarBase().getHost() + "/hsa_MTI.xls"; downloadFile(url, regulationFolder.resolve("hsa_MTI.xls").toString()); } } if (species.getScientificName().equals("Mus musculus")) { url = configuration.getDownload().getTargetScan().getHost() + "/mm9/database/targetScanS.txt.gz"; downloadFile(url, regulationFolder.resolve("targetScanS.txt.gz").toString()); url = configuration.getDownload().getMiRTarBase().getHost() + "/mmu_MTI.xls"; downloadFile(url, regulationFolder.resolve("mmu_MTI.xls").toString()); } } /** * This method downloads UniProt, IntAct and Interpro data from EMBL-EBI. 
* * @throws IOException * @throws InterruptedException */ private void downloadProtein() throws IOException, InterruptedException { logger.info("Downloading protein information ..."); Path proteinFolder = common.resolve("protein"); if (!Files.exists(proteinFolder)) { makeDir(proteinFolder); String url = configuration.getDownload().getUniprot().getHost(); downloadFile(url, proteinFolder.resolve("uniprot_sprot.xml.gz").toString()); makeDir(proteinFolder.resolve("uniprot_chunks")); splitUniprot(proteinFolder.resolve("uniprot_sprot.xml.gz"), proteinFolder.resolve("uniprot_chunks")); url = configuration.getDownload().getIntact().getHost(); downloadFile(url, proteinFolder.resolve("intact.txt").toString()); url = configuration.getDownload().getInterpro().getHost(); downloadFile(url, proteinFolder.resolve("protein2ipr.dat.gz").toString()); } else { logger.info("Protein: skipping this since it is already downloaded. Delete 'protein' folder to force download"); } } private void splitUniprot(Path uniprotFilePath, Path splitOutdirPath) throws IOException { BufferedReader br = FileUtils.newBufferedReader(uniprotFilePath); PrintWriter pw = null; StringBuilder header = new StringBuilder(); boolean beforeEntry = true; boolean inEntry = false; int count = 0; int chunk = 0; String line; while ((line = br.readLine()) != null) { if (line.trim().startsWith("<entry ")) { inEntry = true; beforeEntry = false; if (count % 10000 == 0) { pw = new PrintWriter(new FileOutputStream(splitOutdirPath.resolve("chunk_" + chunk + ".xml").toFile())); pw.println(header.toString().trim()); } count++; } if (beforeEntry) { header.append(line).append("\n"); } if (inEntry) { pw.println(line); } if (line.trim().startsWith("</entry>")) { inEntry = false; if (count % 10000 == 0) { pw.print("</uniprot>"); pw.close(); chunk++; } } } pw.print("</uniprot>"); pw.close(); br.close(); } /** * This method downloads bith PhastCons and PhyloP data from UCSC for Human and Mouse species. 
* * @param species The Species object to download the data * @param assembly The assembly required * @param speciesFolder Output folder to download the data * @throws IOException * @throws InterruptedException */ private void downloadConservation(Species species, String assembly, Path speciesFolder) throws IOException, InterruptedException { logger.info("Downloading conservation information ..."); Path conservationFolder = speciesFolder.resolve("conservation"); if (species.getScientificName().equals("Homo sapiens")) { makeDir(conservationFolder); makeDir(conservationFolder.resolve("phastCons")); makeDir(conservationFolder.resolve("phylop")); makeDir(conservationFolder.resolve("gerp")); String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "X", "Y", "M", }; if (assembly.equalsIgnoreCase("GRCh37")) { logger.debug("Downloading GERP++ ..."); downloadFile(configuration.getDownload().getGerp().getHost(), conservationFolder.resolve("gerp/hg19.GERP_scores.tar.gz").toAbsolutePath().toString()); String url = configuration.getDownload().getConservation().getHost() + "/hg19"; for (int i = 0; i < chromosomes.length; i++) { String phastConsUrl = url + "/phastCons46way/primates/chr" + chromosomes[i] + ".phastCons46way.primates.wigFix.gz"; downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i] + ".phastCons46way.primates.wigFix.gz").toString()); String phyloPUrl = url + "/phyloP46way/primates/chr" + chromosomes[i] + ".phyloP46way.primate.wigFix.gz"; downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i] + ".phyloP46way.primate.wigFix.gz").toString()); } } if (assembly.equalsIgnoreCase("GRCh38")) { String url = configuration.getDownload().getConservation().getHost() + "/hg38"; for (int i = 0; i < chromosomes.length; i++) { String phastConsUrl = url + "/phastCons100way/hg38.100way.phastCons/chr" + chromosomes[i] 
+ ".phastCons100way.wigFix.gz"; downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i] + ".phastCons100way.wigFix.gz").toString()); String phyloPUrl = url + "/phyloP100way/hg38.100way.phyloP100way/chr" + chromosomes[i] + ".phyloP100way.wigFix.gz"; downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i] + ".phyloP100way.wigFix.gz").toString()); } // String phastConsUrl = url + "/phastCons7way/hg38.phastCons100way.wigFix.gz"; // Path outFile = conservationFolder.resolve("phastCons").resolve("hg38.phastCons100way.wigFix.gz"); // downloadFile(phastConsUrl, outFile.toString()); // // String phyloPUrl = url + "/phyloP7way/hg38.phyloP100way.wigFix.gz"; // outFile = conservationFolder.resolve("phylop").resolve("hg38.phyloP100way.wigFix.gz"); // downloadFile(phyloPUrl, outFile.toString()); } } if (species.getScientificName().equals("Mus musculus")) { makeDir(conservationFolder); makeDir(conservationFolder.resolve("phastCons")); makeDir(conservationFolder.resolve("phylop")); String url = configuration.getDownload().getConservation().getHost() + "/mm10"; String[] chromosomes = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "X", "Y", "M", }; for (int i = 0; i < chromosomes.length; i++) { String phastConsUrl = url + "/phastCons60way/mm10.60way.phastCons/chr" + chromosomes[i] + ".phastCons60way.wigFix.gz"; downloadFile(phastConsUrl, conservationFolder.resolve("phastCons").resolve("chr" + chromosomes[i] + ".phastCons60way.wigFix.gz").toString()); String phyloPUrl = url + "/phyloP60way/mm10.60way.phyloP60way/chr" + chromosomes[i] + ".phyloP60way.wigFix.gz"; downloadFile(phyloPUrl, conservationFolder.resolve("phylop").resolve("chr" + chromosomes[i] + ".phyloP60way.wigFix.gz").toString()); } } } private void downloadClinical(Species species, Path speciesFolder) throws IOException, InterruptedException { if (species.getScientificName().equals("Homo 
sapiens")) { logger.info("Downloading clinical information ..."); Path clinicalFolder = speciesFolder.resolve("clinical"); makeDir(clinicalFolder); String url = configuration.getDownload().getClinvar().getHost(); downloadFile(url, clinicalFolder.resolve("ClinVar.xml.gz").toString()); url = configuration.getDownload().getClinvarEfoTerms().getHost(); downloadFile(url, clinicalFolder.resolve("ClinVar_Traits_EFO_Names.csv").toString()); url = configuration.getDownload().getClinvarSummary().getHost(); downloadFile(url, clinicalFolder.resolve("variant_summary.txt.gz").toString()); url = configuration.getDownload().getGwasCatalog().getHost(); downloadFile(url, clinicalFolder.resolve("gwas_catalog.tsv").toString()); url = configuration.getDownload().getDbsnp().getHost(); downloadFile(url, clinicalFolder.resolve("All.vcf.gz").toString()); url = url + ".tbi"; downloadFile(url, clinicalFolder.resolve("All.vcf.gz.tbi").toString()); } } private void downloadGeneDiseaseAssociation(Species species, Path speciesFolder) throws IOException, InterruptedException { if (species.getScientificName().equals("Homo sapiens")) { logger.info("Downloading gene to disease information ..."); Path gene2diseaseFolder = speciesFolder.resolve("gene_disease_association"); makeDir(gene2diseaseFolder); // Downloads DisGeNET String url = configuration.getDownload().getDisgenet().getHost(); String readmeUrl = configuration.getDownload().getDisgenetReadme().getHost(); downloadFile(url, gene2diseaseFolder.resolve("all_gene_disease_associations.txt.gz").toString()); downloadFile(readmeUrl, gene2diseaseFolder.resolve("disgenetReadme.txt").toString()); saveVersionData(GENE_DISEASE_ASSOCIATION_DATA, DISGENET_NAME, getDisgenetVersion(gene2diseaseFolder.resolve("disgenetReadme.txt")), getTimeStamp(), Collections.singletonList(url), gene2diseaseFolder.resolve("disgenetVersion.json")); // Downloads HPO url = configuration.getDownload().getHpo().getHost(); downloadFile(url, 
gene2diseaseFolder.resolve("ALL_SOURCES_ALL_FREQUENCIES_diseases_to_genes_to_phenotypes.txt").toString()); saveVersionData(GENE_DISEASE_ASSOCIATION_DATA, HPO_NAME, null, getTimeStamp(), Collections.singletonList(url), gene2diseaseFolder.resolve("hpoVersion.json")); } } private void downloadCaddScores(Species species, String assembly, Path speciesFolder) throws IOException, InterruptedException { if (species.getScientificName().equals("Homo sapiens") && assembly.equalsIgnoreCase("GRCh37")) { logger.info("Downloading CADD scores information ..."); Path variationFunctionalScoreFolder = speciesFolder.resolve("variation_functional_score"); makeDir(variationFunctionalScoreFolder); // Downloads CADD scores String url = configuration.getDownload().getCadd().getHost(); downloadFile(url, variationFunctionalScoreFolder.resolve("whole_genome_SNVs.tsv.gz").toString()); } } private void downloadFile(String url, String outputFileName) throws IOException, InterruptedException { List<String> wgetArgs = Arrays.asList("--tries=10", url, "-O", outputFileName, "-o", outputFileName + ".log"); boolean downloaded = runCommandLineProcess(null, "wget", wgetArgs, null); if (downloaded) { logger.info(outputFileName + " created OK"); } else { logger.warn(url + " cannot be downloaded"); } } }
versioninfo: new variation and variation_functional_score versions data are now saved when downloading
cellbase-app/src/main/java/org/opencb/cellbase/app/cli/DownloadCommandExecutor.java
versioninfo: new variation and variation_functional_score versions data are now saved when downloading
Java
apache-2.0
5b9fc01c20db6d55011753ae0795bf4973497646
0
maxrp/autopsy,wschaeferB/autopsy,APriestman/autopsy,esaunders/autopsy,APriestman/autopsy,rcordovano/autopsy,eXcomm/autopsy,mhmdfy/autopsy,wschaeferB/autopsy,millmanorama/autopsy,eXcomm/autopsy,sidheshenator/autopsy,eXcomm/autopsy,maxrp/autopsy,karlmortensen/autopsy,APriestman/autopsy,maxrp/autopsy,narfindustries/autopsy,APriestman/autopsy,APriestman/autopsy,karlmortensen/autopsy,karlmortensen/autopsy,mhmdfy/autopsy,sidheshenator/autopsy,rcordovano/autopsy,millmanorama/autopsy,millmanorama/autopsy,esaunders/autopsy,raman-bt/autopsy,rcordovano/autopsy,APriestman/autopsy,rcordovano/autopsy,rcordovano/autopsy,raman-bt/autopsy,mhmdfy/autopsy,dgrove727/autopsy,rcordovano/autopsy,wschaeferB/autopsy,esaunders/autopsy,mhmdfy/autopsy,eXcomm/autopsy,esaunders/autopsy,dgrove727/autopsy,esaunders/autopsy,raman-bt/autopsy,raman-bt/autopsy,raman-bt/autopsy,maxrp/autopsy,wschaeferB/autopsy,narfindustries/autopsy,dgrove727/autopsy,raman-bt/autopsy,APriestman/autopsy,wschaeferB/autopsy,narfindustries/autopsy,karlmortensen/autopsy,sidheshenator/autopsy,raman-bt/autopsy,sidheshenator/autopsy,millmanorama/autopsy
/* * Autopsy Forensic Browser * * Copyright 2011 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.keywordsearch; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import javax.swing.Timer; import org.apache.commons.lang.StringEscapeUtils; import org.apache.solr.client.solrj.SolrServerException; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; import org.openide.util.Cancellable; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; import org.sleuthkit.autopsy.ingest.IngestModuleInit; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import 
org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ContentVisitor; import org.sleuthkit.datamodel.File; import org.sleuthkit.datamodel.FsContent; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData.FileKnown; /** * An ingest module on a file level Performs indexing of allocated and Solr * supported files, string extraction and indexing of unallocated and not Solr * supported files Index commit is done periodically (determined by user set * ingest update interval) Runs a periodic keyword / regular expression search * on currently configured lists for ingest and writes results to blackboard * Reports interesting events to Inbox and to viewers * * Registered as a module in layer.xml */ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile { private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName()); public static final String MODULE_NAME = "Keyword Search"; public static final String MODULE_DESCRIPTION = "Performs file indexing and periodic search using keywords and regular expressions in lists."; private static KeywordSearchIngestModule instance = null; private IngestServices services; private Ingester ingester = null; private volatile boolean commitIndex = false; //whether to commit index next time private volatile boolean runSearcher = false; //whether to run searcher next time private List<Keyword> keywords; //keywords to search private List<String> keywordLists; // lists currently being searched private Map<String, KeywordSearchList> keywordToList; //keyword to list name mapping private Timer commitTimer; private Timer searchTimer; 
//private static final int COMMIT_INTERVAL_MS = 10 * 60 * 1000; private Indexer indexer; private Searcher currentSearcher; private Searcher finalSearcher; private volatile boolean searcherDone = true; //mark as done, until it's inited private Map<Keyword, List<Long>> currentResults; private static final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy private static final Lock searcherLock = rwLock.writeLock(); private volatile int messageID = 0; private boolean processedFiles; private volatile boolean finalSearcherDone = true; //mark as done, until it's inited private final String hashDBModuleName = "Hash Lookup"; //NOTE this needs to match the HashDB module getName() private SleuthkitCase caseHandle = null; private boolean skipKnown = true; private boolean initialized = false; private List<AbstractFileExtract> textExtractors; private AbstractFileStringExtract stringExtractor; private final List<SCRIPT> stringExtractScripts = new ArrayList<SCRIPT>(); private Map<String,String> stringExtractOptions = new HashMap<String,String>(); private final GetIsFileKnownV getIsFileKnown = new GetIsFileKnownV(); private enum IngestStatus { INGESTED, EXTRACTED_INGESTED, SKIPPED, INGESTED_META }; private Map<Long, IngestStatus> ingestStatus; //private constructor to ensure singleton instance private KeywordSearchIngestModule() { //set default script stringExtractScripts.add(SCRIPT.LATIN_1); stringExtractOptions.put(AbstractFileExtract.ExtractOptions.EXTRACT_UTF8.toString(), Boolean.TRUE.toString()); stringExtractOptions.put(AbstractFileExtract.ExtractOptions.EXTRACT_UTF16.toString(), Boolean.TRUE.toString()); } /** * Returns singleton instance of the module, creates one if needed * * @return instance of the module */ public static synchronized KeywordSearchIngestModule getDefault() { if (instance == null) { instance = new KeywordSearchIngestModule(); } return instance; } /** * Starts processing of every file provided by IngestManager. 
Checks if it * is time to commit and run search * * @param abstractFile file/unallocated file/directory to process * @return ProcessResult.OK in most cases and ERROR only if error in the * pipeline, otherwise does not advice to stop the pipeline */ @Override public ProcessResult process(AbstractFile abstractFile) { if (initialized == false) //error initializing indexing/Solr { return ProcessResult.OK; } //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it IngestModuleAbstractFile.ProcessResult hashDBResult = services.getAbstractFileModuleResult(hashDBModuleName); //logger.log(Level.INFO, "hashdb result: " + hashDBResult + "file: " + AbstractFile.getName()); if (hashDBResult == IngestModuleAbstractFile.ProcessResult.ERROR) { //index meta-data only indexer.indexFile(abstractFile, false); //notify depending module that keyword search (would) encountered error for this file return ProcessResult.ERROR; } else if (skipKnown && abstractFile.accept(getIsFileKnown) == true) { //index meta-data only indexer.indexFile(abstractFile, false); return ProcessResult.OK; } if (processedFiles == false) { processedFiles = true; } //check if it's time to commit after previous processing checkRunCommitSearch(); //index the file and content (if the content is supported) indexer.indexFile(abstractFile, true); return ProcessResult.OK; } /** * Process content hierarchy and return true if content is a file and is set as known */ private class GetIsFileKnownV extends ContentVisitor.Default<Boolean> { @Override protected Boolean defaultVisit(Content cntnt) { return false; } @Override public Boolean visit(File file) { return file.getKnown() == FileKnown.KNOWN; } } /** * After all files are ingested, execute final index commit and final search * Cleanup resources, threads, timers */ @Override public void complete() { if (initialized == false) { return; } //logger.log(Level.INFO, "complete()"); commitTimer.stop(); //handle case if previous search running 
//cancel it, will re-run after final commit //note: cancellation of Searcher worker is graceful (between keywords) if (currentSearcher != null) { currentSearcher.cancel(false); } //cancel searcher timer, ensure unwanted searcher does not start //before we start the final one if (searchTimer.isRunning()) { searchTimer.stop(); } runSearcher = false; logger.log(Level.INFO, "Running final index commit and search"); //final commit commit(); postIndexSummary(); //run one last search as there are probably some new files committed if (keywords != null && !keywords.isEmpty() && processedFiles == true) { finalSearcher = new Searcher(keywords, true); //final searcher run finalSearcher.execute(); } else { finalSearcherDone = true; services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Completed")); } //log number of files / chunks in index //signal a potential change in number of indexed files try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); } catch (NoOpenCoreException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); } catch (SolrServerException se) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); } //postSummary(); } /** * Handle stop event (ingest interrupted) Cleanup resources, threads, timers */ @Override public void stop() { logger.log(Level.INFO, "stop()"); //stop timer commitTimer.stop(); //stop currentSearcher if (currentSearcher != null) { currentSearcher.cancel(true); } //cancel searcher timer, ensure unwanted searcher does not start if (searchTimer.isRunning()) { searchTimer.stop(); } runSearcher = false; finalSearcherDone = true; //commit uncommited files, don't 
search again commit(); //postSummary(); } @Override public String getName() { return MODULE_NAME; } @Override public String getDescription() { return MODULE_DESCRIPTION; } /** * Initializes the module for new ingest run Sets up threads, timers, * retrieves settings, keyword lists to run on * * @param services */ @Override public void init(IngestModuleInit initContext) { logger.log(Level.INFO, "init()"); services = IngestServices.getDefault(); initialized = false; caseHandle = Case.getCurrentCase().getSleuthkitCase(); ingester = Server.getIngester(); //initialize extractors stringExtractor = new AbstractFileStringExtract(); stringExtractor.setScripts(stringExtractScripts); stringExtractor.setOptions(stringExtractOptions); //log the scripts used for debugging final StringBuilder sbScripts = new StringBuilder(); for (SCRIPT s : stringExtractScripts) { sbScripts.append(s.name()).append(" "); } logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); textExtractors = new ArrayList<AbstractFileExtract>(); //order matters, more specific extractors first textExtractors.add(new AbstractFileHtmlExtract()); textExtractors.add(new AbstractFileTikaTextExtract()); ingestStatus = new HashMap<Long, IngestStatus>(); keywords = new ArrayList<Keyword>(); keywordLists = new ArrayList<String>(); keywordToList = new HashMap<String, KeywordSearchList>(); initKeywords(); if (keywords.isEmpty() || keywordLists.isEmpty()) { services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, "No keywords in keyword list.", "Only indexing will be done and and keyword search will be skipped (it can be executed later again as ingest or using toolbar search feature).")); } processedFiles = false; finalSearcherDone = false; searcherDone = true; //make sure to start the initial currentSearcher //keeps track of all results per run not to repeat reporting the same hits currentResults = new HashMap<Keyword, List<Long>>(); indexer = new Indexer(); final int 
updateIntervalMs = services.getUpdateFrequency() * 60 * 1000; logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); initialized = true; commitTimer.start(); searchTimer.start(); services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Started")); } @Override public ModuleType getType() { return ModuleType.AbstractFile; } @Override public boolean hasSimpleConfiguration() { return true; } @Override public boolean hasAdvancedConfiguration() { return true; } @Override public javax.swing.JPanel getSimpleConfiguration() { return new KeywordSearchIngestSimplePanel(); } @Override public javax.swing.JPanel getAdvancedConfiguration() { return KeywordSearchConfigurationPanel.getDefault(); } @Override public void saveAdvancedConfiguration() { } @Override public void saveSimpleConfiguration() { } /** * The modules maintains background threads, return true if background * threads are running or there are pending tasks to be run in the future, * such as the final search post-ingest completion * * @return */ @Override public boolean hasBackgroundJobsRunning() { if ((currentSearcher != null && searcherDone == false) || (finalSearcherDone == false)) { return true; } else { return false; } } /** * Commits index and notifies listeners of index update */ private void commit() { if (initialized) { logger.log(Level.INFO, "Commiting index"); ingester.commit(); logger.log(Level.INFO, "Index comitted"); //signal a potential change in number of indexed files indexChangeNotify(); } } /** * Posts inbox message with summary of indexed files */ private void postIndexSummary() { int indexed = 0; int indexed_meta = 0; int indexed_extr = 0; int skipped = 0; for (IngestStatus s : ingestStatus.values()) { switch (s) { case INGESTED: 
++indexed; break; case INGESTED_META: ++indexed_meta; break; case EXTRACTED_INGESTED: ++indexed_extr; break; case SKIPPED: ++skipped; break; default: ; } } StringBuilder msg = new StringBuilder(); msg.append("Indexed files: ").append(indexed).append("<br />Indexed strings: ").append(indexed_extr); msg.append("<br />Indexed meta-data only: ").append(indexed_meta).append("<br />"); msg.append("<br />Skipped files: ").append(skipped).append("<br />"); String indexStats = msg.toString(); logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Keyword Indexing Completed", indexStats)); } /** * Helper method to notify listeners on index update */ private void indexChangeNotify() { //signal a potential change in number of indexed files try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles)); } catch (NoOpenCoreException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); } catch (SolrServerException se) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); } } /** * Initialize the keyword search lists from the XML loader */ private void initKeywords() { KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); keywords.clear(); keywordLists.clear(); keywordToList.clear(); for (KeywordSearchList list : loader.getListsL()) { String listName = list.getName(); if (list.getUseForIngest()) { keywordLists.add(listName); } for (Keyword keyword : list.getKeywords()) { keywords.add(keyword); keywordToList.put(keyword.getQuery(), list); } } } List<String> getKeywordLists() { return keywordLists == null ? new ArrayList<String>() : keywordLists; } /** * Check if time to commit, if so, run commit. 
Then run search if search * timer is also set. */ void checkRunCommitSearch() { if (commitIndex) { logger.log(Level.INFO, "Commiting index"); commit(); commitIndex = false; //after commit, check if time to run searcher //NOTE commit/searcher timings don't need to align //in worst case, we will run search next time after commit timer goes off, or at the end of ingest if (searcherDone && runSearcher) { //start search if previous not running if (keywords != null && !keywords.isEmpty()) { currentSearcher = new Searcher(keywords); currentSearcher.execute();//searcher will stop timer and restart timer when done } } } } /** * CommitTimerAction to run by commitTimer Sets a flag to indicate we are * ready for commit */ private class CommitTimerAction implements ActionListener { private final Logger logger = Logger.getLogger(CommitTimerAction.class.getName()); @Override public void actionPerformed(ActionEvent e) { commitIndex = true; logger.log(Level.INFO, "CommitTimer awake"); } } /** * SearchTimerAction to run by searchTimer Sets a flag to indicate we are * ready to search */ private class SearchTimerAction implements ActionListener { private final Logger logger = Logger.getLogger(SearchTimerAction.class.getName()); @Override public void actionPerformed(ActionEvent e) { runSearcher = true; logger.log(Level.INFO, "SearchTimer awake"); } } /** * File indexer, processes and indexes known/allocated files, * unknown/unallocated files and directories accordingly */ private class Indexer { private final Logger logger = Logger.getLogger(Indexer.class.getName()); /** * Extract strings or text with Tika (by streaming) from the file Divide * the file into chunks and index the chunks * * @param aFile file to extract strings from, divide into chunks and * index * @param stringsOnly true if use string extraction, false if to use a * content-type specific text extractor * @return true if the file was indexed, false otherwise * @throws IngesterException exception thrown if indexing failed 
*/ private boolean extractIndex(AbstractFile aFile, boolean stringsOnly) throws IngesterException { AbstractFileExtract fileExtract = null; if (stringsOnly) { fileExtract = stringExtractor; } else { //go over available text extractors and pick the first one (most specific one) for (AbstractFileExtract fe : textExtractors) { if (fe.isSupported(aFile)) { fileExtract = fe; break; } } } if (fileExtract == null) { throw new IngesterException("No supported file extractor found for file: " + aFile.getId() + " " + aFile.getName()); } //logger.log(Level.INFO, "Extractor: " + fileExtract + ", file: " + aFile.getName()); //divide into chunks and index return fileExtract.index(aFile); } private boolean isTextExtractSupported(AbstractFile aFile) { for (AbstractFileExtract extractor : textExtractors) { if (extractor.isContentTypeSpecific() == true && extractor.isSupported(aFile)) { return true; } } return false; } private void indexFile(AbstractFile aFile, boolean indexContent) { //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName()); FsContent fsContent = null; //check if alloc fs file or dir TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); if (aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { //skip indexing of virtual dirs (no content, no real name) - will index children files return; } else if (aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) { fsContent = (FsContent) aFile; } final long size = aFile.getSize(); //if alloc fs file and not to index content, or a dir, or 0 content, index meta data only if (fsContent != null && (indexContent == false || fsContent.isDir() || size == 0)) { try { ingester.ingest(fsContent, false); //meta-data only ingestStatus.put(aFile.getId(), IngestStatus.INGESTED_META); } catch (IngesterException ex) { ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); logger.log(Level.WARNING, "Unable to index meta-data for fsContent: " + fsContent.getId(), ex); } return; } boolean extractTextSupported = 
isTextExtractSupported(aFile); if (fsContent != null && extractTextSupported) { //we know it's an allocated FS file (since it's FsContent) //extract text with one of the extractors, divide into chunks and index with Solr try { //logger.log(Level.INFO, "indexing: " + fsContent.getName()); if (!extractIndex(aFile, false)) { logger.log(Level.WARNING, "Failed to extract Tika text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); //try to extract strings, if a file if (fsContent.isFile() == true) { processNonIngestible(fsContent); } } else { ingestStatus.put(aFile.getId(), IngestStatus.INGESTED); } } catch (IngesterException e) { logger.log(Level.INFO, "Could not extract text with Tika, " + fsContent.getId() + ", " + fsContent.getName(), e); ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED); //try to extract strings, if a file if (fsContent.isFile() == true) { processNonIngestible(fsContent); } } catch (Exception e) { logger.log(Level.WARNING, "Error extracting text with Tika, " + fsContent.getId() + ", " + fsContent.getName(), e); ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED); //try to extract strings if a file if (fsContent.isFile() == true) { processNonIngestible(fsContent); } } } else { //unallocated file or unsupported content type by Solr processNonIngestible(aFile); } } private boolean processNonIngestible(AbstractFile aFile) { try { if (!extractIndex(aFile, true)) { logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); return false; } else { ingestStatus.put(aFile.getId(), IngestStatus.EXTRACTED_INGESTED); return true; } } catch (IngesterException ex) { logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); 
return false; } } } /** * Searcher responsible for searching the current index and writing results * to blackboard and the inbox. Also, posts results to listeners as Ingest * data events. Searches entire index, and keeps track of only new results * to report and save. Runs as a background thread. */ private class Searcher extends SwingWorker<Object, Void> { private List<Keyword> keywords; private ProgressHandle progress; private final Logger logger = Logger.getLogger(Searcher.class.getName()); private boolean finalRun = false; Searcher(List<Keyword> keywords) { this.keywords = keywords; } Searcher(List<Keyword> keywords, boolean finalRun) { this(keywords); this.finalRun = finalRun; } @Override protected Object doInBackground() throws Exception { logger.log(Level.INFO, "Pending start of new searcher"); final String displayName = "Keyword Search" + (finalRun ? " - Finalizing" : ""); progress = ProgressHandleFactory.createHandle(displayName + (" (Pending)"), new Cancellable() { @Override public boolean cancel() { logger.log(Level.INFO, "Cancelling the searcher by user."); if (progress != null) { progress.setDisplayName(displayName + " (Cancelling...)"); } return Searcher.this.cancel(true); } }); progress.start(); progress.switchToIndeterminate(); //block to ensure previous searcher is completely done with doInBackground() //even after previous searcher cancellation, we need to check this searcherLock.lock(); final StopWatch stopWatch = new StopWatch(); stopWatch.start(); try { logger.log(Level.INFO, "Started a new searcher"); progress.setDisplayName(displayName); //make sure other searchers are not spawned searcherDone = false; runSearcher = false; if (searchTimer.isRunning()) { searchTimer.stop(); } int numSearched = 0; updateKeywords(); progress.switchToDeterminate(keywords.size()); for (Keyword keywordQuery : keywords) { if (this.isCancelled()) { logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); return 
null; } final String queryStr = keywordQuery.getQuery(); final KeywordSearchList list = keywordToList.get(queryStr); final String listName = list.getName(); //DEBUG //logger.log(Level.INFO, "Searching: " + queryStr); progress.progress(queryStr, numSearched); KeywordSearchQuery del = null; boolean isRegex = !keywordQuery.isLiteral(); if (!isRegex) { del = new LuceneQuery(keywordQuery); del.escape(); } else { del = new TermComponentQuery(keywordQuery); } Map<String, List<ContentHit>> queryResult = null; try { queryResult = del.performQuery(); } catch (NoOpenCoreException ex) { logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex); //no reason to continue with next query if recovery failed //or wait for recovery to kick in and run again later //likely case has closed and threads are being interrupted return null; } catch (CancellationException e) { logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery()); return null; } catch (Exception e) { logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); continue; } //calculate new results but substracting results already obtained in this ingest Map<Keyword, List<ContentHit>> newResults = filterResults(queryResult, isRegex); if (!newResults.isEmpty()) { //write results to BB //new artifacts created, to report to listeners Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>(); for (final Keyword hitTerm : newResults.keySet()) { List<ContentHit> contentHitsAll = newResults.get(hitTerm); Map<AbstractFile, Integer> contentHitsFlattened = ContentHit.flattenResults(contentHitsAll); for (final AbstractFile hitFile : contentHitsFlattened.keySet()) { String snippet = null; final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery()); int chunkId = contentHitsFlattened.get(hitFile); try { snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true); } catch 
(NoOpenCoreException e) { logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //no reason to continue return null; } catch (Exception e) { logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); continue; } KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName); if (written == null) { logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString()); continue; } newArtifacts.add(written.getArtifact()); //generate a data message for each artifact StringBuilder subjectSb = new StringBuilder(); StringBuilder detailsSb = new StringBuilder(); //final int hitFiles = newResults.size(); if (!keywordQuery.isLiteral()) { subjectSb.append("RegExp hit: "); } else { subjectSb.append("Keyword hit: "); } //subjectSb.append("<"); String uniqueKey = null; BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()); if (attr != null) { final String keyword = attr.getValueString(); subjectSb.append(keyword); uniqueKey = keyword.toLowerCase(); } //subjectSb.append(">"); //String uniqueKey = queryStr; //details detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //hit detailsSb.append("<tr>"); detailsSb.append("<th>Keyword hit</th>"); detailsSb.append("<td>").append(StringEscapeUtils.escapeHtml(attr.getValueString())).append("</td>"); detailsSb.append("</tr>"); //preview attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID()); if (attr != null) { detailsSb.append("<tr>"); detailsSb.append("<th>Preview</th>"); detailsSb.append("<td>").append(StringEscapeUtils.escapeHtml(attr.getValueString())).append("</td>"); detailsSb.append("</tr>"); } //file detailsSb.append("<tr>"); detailsSb.append("<th>File</th>"); if (hitFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) { detailsSb.append("<td>").append(((FsContent) 
hitFile).getParentPath()).append(hitFile.getName()).append("</td>"); } else { detailsSb.append("<td>").append(hitFile.getName()).append("</td>"); } detailsSb.append("</tr>"); //list attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); detailsSb.append("<tr>"); detailsSb.append("<th>List</th>"); detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); detailsSb.append("</tr>"); //regex if (!keywordQuery.isLiteral()) { attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()); if (attr != null) { detailsSb.append("<tr>"); detailsSb.append("<th>RegEx</th>"); detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); detailsSb.append("</tr>"); } } detailsSb.append("</table>"); //check if should send messages on hits on this list if (list.getIngestMessages()) //post ingest inbox msg { services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact())); } } //for each term hit }//for each file hit //update artifact browser if (!newArtifacts.isEmpty()) { services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts)); } } progress.progress(queryStr, ++numSearched); } } //end try block catch (Exception ex) { logger.log(Level.WARNING, "searcher exception occurred", ex); } finally { finalizeSearcher(); stopWatch.stop(); logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs."); searcherLock.unlock(); } return null; } /** * Retrieve the updated keyword search lists from the XML loader */ private void updateKeywords() { KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); keywords.clear(); keywordToList.clear(); for (String name : keywordLists) { KeywordSearchList list = loader.getList(name); for (Keyword k : list.getKeywords()) { keywords.add(k); keywordToList.put(k.getQuery(), list); } } } 
//perform all essential cleanup that needs to be done right AFTER doInBackground() returns //without relying on done() method that is not guaranteed to run after background thread completes //NEED to call this method always right before doInBackground() returns /** * Performs the cleanup that needs to be done right AFTER * doInBackground() returns without relying on done() method that is not * guaranteed to run after background thread completes REQUIRED to call * this method always right before doInBackground() returns */ private void finalizeSearcher() { logger.log(Level.INFO, "Searcher finalizing"); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { progress.finish(); } }); searcherDone = true; //next currentSearcher can start if (finalRun) { //this is the final searcher logger.log(Level.INFO, "The final searcher in this ingest done."); finalSearcherDone = true; keywords.clear(); keywordLists.clear(); keywordToList.clear(); //reset current resuls earlier to potentially garbage collect sooner currentResults = new HashMap<Keyword, List<Long>>(); services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, KeywordSearchIngestModule.instance, "Completed")); } else { //start counting time for a new searcher to start //unless final searcher is pending if (finalSearcher != null) { searchTimer.start(); } } } //calculate new results but substracting results already obtained in this ingest //update currentResults map with the new results private Map<Keyword, List<ContentHit>> filterResults(Map<String, List<ContentHit>> queryResult, boolean isRegex) { Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>(); for (String termResult : queryResult.keySet()) { List<ContentHit> queryTermResults = queryResult.get(termResult); //translate to list of IDs that we keep track of List<Long> queryTermResultsIDs = new ArrayList<Long>(); for (ContentHit ch : queryTermResults) { queryTermResultsIDs.add(ch.getId()); } 
Keyword termResultK = new Keyword(termResult, !isRegex); List<Long> curTermResults = currentResults.get(termResultK); if (curTermResults == null) { currentResults.put(termResultK, queryTermResultsIDs); newResults.put(termResultK, queryTermResults); } else { //some AbstractFile hits already exist for this keyword for (ContentHit res : queryTermResults) { if (!curTermResults.contains(res.getId())) { //add to new results List<ContentHit> newResultsFs = newResults.get(termResultK); if (newResultsFs == null) { newResultsFs = new ArrayList<ContentHit>(); newResults.put(termResultK, newResultsFs); } newResultsFs.add(res); curTermResults.add(res.getId()); } } } } return newResults; } } /** * Set the skip known files setting on the module * * @param skip true if skip, otherwise, will process known files as well, as * reported by HashDB module */ void setSkipKnown(boolean skip) { this.skipKnown = skip; } boolean getSkipKnown() { return skipKnown; } /** * Set the scripts to use for string extraction. 
Takes effect on next ingest * start / at init(), not in effect if ingest is running * * @param scripts scripts to use for string extraction next time ingest * inits and runs */ void setStringExtractScripts(List<SCRIPT> scripts) { this.stringExtractScripts.clear(); this.stringExtractScripts.addAll(scripts); } /** * gets the currently set scripts to use * * @return the list of currently used script */ List<SCRIPT> getStringExtractScripts() { return new ArrayList<SCRIPT>(this.stringExtractScripts); } /** * Set / override string extract option * @param key option name to set * @param val option value to set */ void setStringExtractOption(String key, String val) { this.stringExtractOptions.put(key, val); } /** * get string extract option for the key * @param key option name * @return option string value, or empty string if the option is not set */ String getStringExtractOption(String key) { if (this.stringExtractOptions.containsKey(key)) { return this.stringExtractOptions.get(key); } else { return ""; } } }
KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
/* * Autopsy Forensic Browser * * Copyright 2011 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.keywordsearch; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import javax.swing.Timer; import org.apache.commons.lang.StringEscapeUtils; import org.apache.solr.client.solrj.SolrServerException; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; import org.openide.util.Cancellable; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; import org.sleuthkit.autopsy.ingest.IngestModuleInit; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import 
org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ContentVisitor; import org.sleuthkit.datamodel.File; import org.sleuthkit.datamodel.FsContent; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData.FileKnown; /** * An ingest module on a file level Performs indexing of allocated and Solr * supported files, string extraction and indexing of unallocated and not Solr * supported files Index commit is done periodically (determined by user set * ingest update interval) Runs a periodic keyword / regular expression search * on currently configured lists for ingest and writes results to blackboard * Reports interesting events to Inbox and to viewers * * Registered as a module in layer.xml */ public final class KeywordSearchIngestModule implements IngestModuleAbstractFile { private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName()); public static final String MODULE_NAME = "Keyword Search"; public static final String MODULE_DESCRIPTION = "Performs file indexing and periodic search using keywords and regular expressions in lists."; private static KeywordSearchIngestModule instance = null; private IngestServices services; private Ingester ingester = null; private volatile boolean commitIndex = false; //whether to commit index next time private volatile boolean runSearcher = false; //whether to run searcher next time private List<Keyword> keywords; //keywords to search private List<String> keywordLists; // lists currently being searched private Map<String, KeywordSearchList> keywordToList; //keyword to list name mapping private Timer commitTimer; private Timer searchTimer; 
//private static final int COMMIT_INTERVAL_MS = 10 * 60 * 1000; private Indexer indexer; private Searcher currentSearcher; private Searcher finalSearcher; private volatile boolean searcherDone = true; //mark as done, until it's inited private Map<Keyword, List<Long>> currentResults; private static final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy private static final Lock searcherLock = rwLock.writeLock(); private volatile int messageID = 0; private boolean processedFiles; private volatile boolean finalSearcherDone = true; //mark as done, until it's inited private final String hashDBModuleName = "Hash Lookup"; //NOTE this needs to match the HashDB module getName() private SleuthkitCase caseHandle = null; private boolean skipKnown = true; private boolean initialized = false; private List<AbstractFileExtract> textExtractors; private AbstractFileStringExtract stringExtractor; private final List<SCRIPT> stringExtractScripts = new ArrayList<SCRIPT>(); private Map<String,String> stringExtractOptions = new HashMap<String,String>(); private final GetIsFileKnownV getIsFileKnown = new GetIsFileKnownV(); private enum IngestStatus { INGESTED, EXTRACTED_INGESTED, SKIPPED, INGESTED_META }; private Map<Long, IngestStatus> ingestStatus; //private constructor to ensure singleton instance private KeywordSearchIngestModule() { //set default script stringExtractScripts.add(SCRIPT.LATIN_1); stringExtractScripts.add(SCRIPT.LATIN_2); stringExtractOptions.put(AbstractFileExtract.ExtractOptions.EXTRACT_UTF8.toString(), Boolean.TRUE.toString()); stringExtractOptions.put(AbstractFileExtract.ExtractOptions.EXTRACT_UTF16.toString(), Boolean.TRUE.toString()); } /** * Returns singleton instance of the module, creates one if needed * * @return instance of the module */ public static synchronized KeywordSearchIngestModule getDefault() { if (instance == null) { instance = new KeywordSearchIngestModule(); } return instance; } /** * Starts processing of 
every file provided by IngestManager. Checks if it * is time to commit and run search * * @param abstractFile file/unallocated file/directory to process * @return ProcessResult.OK in most cases and ERROR only if error in the * pipeline, otherwise does not advice to stop the pipeline */ @Override public ProcessResult process(AbstractFile abstractFile) { if (initialized == false) //error initializing indexing/Solr { return ProcessResult.OK; } //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it IngestModuleAbstractFile.ProcessResult hashDBResult = services.getAbstractFileModuleResult(hashDBModuleName); //logger.log(Level.INFO, "hashdb result: " + hashDBResult + "file: " + AbstractFile.getName()); if (hashDBResult == IngestModuleAbstractFile.ProcessResult.ERROR) { //index meta-data only indexer.indexFile(abstractFile, false); //notify depending module that keyword search (would) encountered error for this file return ProcessResult.ERROR; } else if (skipKnown && abstractFile.accept(getIsFileKnown) == true) { //index meta-data only indexer.indexFile(abstractFile, false); return ProcessResult.OK; } if (processedFiles == false) { processedFiles = true; } //check if it's time to commit after previous processing checkRunCommitSearch(); //index the file and content (if the content is supported) indexer.indexFile(abstractFile, true); return ProcessResult.OK; } /** * Process content hierarchy and return true if content is a file and is set as known */ private class GetIsFileKnownV extends ContentVisitor.Default<Boolean> { @Override protected Boolean defaultVisit(Content cntnt) { return false; } @Override public Boolean visit(File file) { return file.getKnown() == FileKnown.KNOWN; } } /** * After all files are ingested, execute final index commit and final search * Cleanup resources, threads, timers */ @Override public void complete() { if (initialized == false) { return; } //logger.log(Level.INFO, "complete()"); commitTimer.stop(); 
//handle case if previous search running //cancel it, will re-run after final commit //note: cancellation of Searcher worker is graceful (between keywords) if (currentSearcher != null) { currentSearcher.cancel(false); } //cancel searcher timer, ensure unwanted searcher does not start //before we start the final one if (searchTimer.isRunning()) { searchTimer.stop(); } runSearcher = false; logger.log(Level.INFO, "Running final index commit and search"); //final commit commit(); postIndexSummary(); //run one last search as there are probably some new files committed if (keywords != null && !keywords.isEmpty() && processedFiles == true) { finalSearcher = new Searcher(keywords, true); //final searcher run finalSearcher.execute(); } else { finalSearcherDone = true; services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Completed")); } //log number of files / chunks in index //signal a potential change in number of indexed files try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); } catch (NoOpenCoreException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); } catch (SolrServerException se) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); } //postSummary(); } /** * Handle stop event (ingest interrupted) Cleanup resources, threads, timers */ @Override public void stop() { logger.log(Level.INFO, "stop()"); //stop timer commitTimer.stop(); //stop currentSearcher if (currentSearcher != null) { currentSearcher.cancel(true); } //cancel searcher timer, ensure unwanted searcher does not start if (searchTimer.isRunning()) { searchTimer.stop(); } runSearcher = false; finalSearcherDone = 
true; //commit uncommited files, don't search again commit(); //postSummary(); } @Override public String getName() { return MODULE_NAME; } @Override public String getDescription() { return MODULE_DESCRIPTION; } /** * Initializes the module for new ingest run Sets up threads, timers, * retrieves settings, keyword lists to run on * * @param services */ @Override public void init(IngestModuleInit initContext) { logger.log(Level.INFO, "init()"); services = IngestServices.getDefault(); initialized = false; caseHandle = Case.getCurrentCase().getSleuthkitCase(); ingester = Server.getIngester(); //initialize extractors stringExtractor = new AbstractFileStringExtract(); stringExtractor.setScripts(stringExtractScripts); stringExtractor.setOptions(stringExtractOptions); //log the scripts used for debugging final StringBuilder sbScripts = new StringBuilder(); for (SCRIPT s : stringExtractScripts) { sbScripts.append(s.name()).append(" "); } logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); textExtractors = new ArrayList<AbstractFileExtract>(); //order matters, more specific extractors first textExtractors.add(new AbstractFileHtmlExtract()); textExtractors.add(new AbstractFileTikaTextExtract()); ingestStatus = new HashMap<Long, IngestStatus>(); keywords = new ArrayList<Keyword>(); keywordLists = new ArrayList<String>(); keywordToList = new HashMap<String, KeywordSearchList>(); initKeywords(); if (keywords.isEmpty() || keywordLists.isEmpty()) { services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, "No keywords in keyword list.", "Only indexing will be done and and keyword search will be skipped (it can be executed later again as ingest or using toolbar search feature).")); } processedFiles = false; finalSearcherDone = false; searcherDone = true; //make sure to start the initial currentSearcher //keeps track of all results per run not to repeat reporting the same hits currentResults = new HashMap<Keyword, List<Long>>(); 
indexer = new Indexer(); final int updateIntervalMs = services.getUpdateFrequency() * 60 * 1000; logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); initialized = true; commitTimer.start(); searchTimer.start(); services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Started")); } @Override public ModuleType getType() { return ModuleType.AbstractFile; } @Override public boolean hasSimpleConfiguration() { return true; } @Override public boolean hasAdvancedConfiguration() { return true; } @Override public javax.swing.JPanel getSimpleConfiguration() { return new KeywordSearchIngestSimplePanel(); } @Override public javax.swing.JPanel getAdvancedConfiguration() { return KeywordSearchConfigurationPanel.getDefault(); } @Override public void saveAdvancedConfiguration() { } @Override public void saveSimpleConfiguration() { } /** * The modules maintains background threads, return true if background * threads are running or there are pending tasks to be run in the future, * such as the final search post-ingest completion * * @return */ @Override public boolean hasBackgroundJobsRunning() { if ((currentSearcher != null && searcherDone == false) || (finalSearcherDone == false)) { return true; } else { return false; } } /** * Commits index and notifies listeners of index update */ private void commit() { if (initialized) { logger.log(Level.INFO, "Commiting index"); ingester.commit(); logger.log(Level.INFO, "Index comitted"); //signal a potential change in number of indexed files indexChangeNotify(); } } /** * Posts inbox message with summary of indexed files */ private void postIndexSummary() { int indexed = 0; int indexed_meta = 0; int indexed_extr = 0; int skipped = 0; for (IngestStatus s : ingestStatus.values()) { 
switch (s) { case INGESTED: ++indexed; break; case INGESTED_META: ++indexed_meta; break; case EXTRACTED_INGESTED: ++indexed_extr; break; case SKIPPED: ++skipped; break; default: ; } } StringBuilder msg = new StringBuilder(); msg.append("Indexed files: ").append(indexed).append("<br />Indexed strings: ").append(indexed_extr); msg.append("<br />Indexed meta-data only: ").append(indexed_meta).append("<br />"); msg.append("<br />Skipped files: ").append(skipped).append("<br />"); String indexStats = msg.toString(); logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Keyword Indexing Completed", indexStats)); } /** * Helper method to notify listeners on index update */ private void indexChangeNotify() { //signal a potential change in number of indexed files try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); KeywordSearch.changeSupport.firePropertyChange(KeywordSearch.NUM_FILES_CHANGE_EVT, null, new Integer(numIndexedFiles)); } catch (NoOpenCoreException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); } catch (SolrServerException se) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); } } /** * Initialize the keyword search lists from the XML loader */ private void initKeywords() { KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); keywords.clear(); keywordLists.clear(); keywordToList.clear(); for (KeywordSearchList list : loader.getListsL()) { String listName = list.getName(); if (list.getUseForIngest()) { keywordLists.add(listName); } for (Keyword keyword : list.getKeywords()) { keywords.add(keyword); keywordToList.put(keyword.getQuery(), list); } } } List<String> getKeywordLists() { return keywordLists == null ? new ArrayList<String>() : keywordLists; } /** * Check if time to commit, if so, run commit. 
Then run search if search * timer is also set. */ void checkRunCommitSearch() { if (commitIndex) { logger.log(Level.INFO, "Commiting index"); commit(); commitIndex = false; //after commit, check if time to run searcher //NOTE commit/searcher timings don't need to align //in worst case, we will run search next time after commit timer goes off, or at the end of ingest if (searcherDone && runSearcher) { //start search if previous not running if (keywords != null && !keywords.isEmpty()) { currentSearcher = new Searcher(keywords); currentSearcher.execute();//searcher will stop timer and restart timer when done } } } } /** * CommitTimerAction to run by commitTimer Sets a flag to indicate we are * ready for commit */ private class CommitTimerAction implements ActionListener { private final Logger logger = Logger.getLogger(CommitTimerAction.class.getName()); @Override public void actionPerformed(ActionEvent e) { commitIndex = true; logger.log(Level.INFO, "CommitTimer awake"); } } /** * SearchTimerAction to run by searchTimer Sets a flag to indicate we are * ready to search */ private class SearchTimerAction implements ActionListener { private final Logger logger = Logger.getLogger(SearchTimerAction.class.getName()); @Override public void actionPerformed(ActionEvent e) { runSearcher = true; logger.log(Level.INFO, "SearchTimer awake"); } } /** * File indexer, processes and indexes known/allocated files, * unknown/unallocated files and directories accordingly */ private class Indexer { private final Logger logger = Logger.getLogger(Indexer.class.getName()); /** * Extract strings or text with Tika (by streaming) from the file Divide * the file into chunks and index the chunks * * @param aFile file to extract strings from, divide into chunks and * index * @param stringsOnly true if use string extraction, false if to use a * content-type specific text extractor * @return true if the file was indexed, false otherwise * @throws IngesterException exception thrown if indexing failed 
*/ private boolean extractIndex(AbstractFile aFile, boolean stringsOnly) throws IngesterException { AbstractFileExtract fileExtract = null; if (stringsOnly) { fileExtract = stringExtractor; } else { //go over available text extractors and pick the first one (most specific one) for (AbstractFileExtract fe : textExtractors) { if (fe.isSupported(aFile)) { fileExtract = fe; break; } } } if (fileExtract == null) { throw new IngesterException("No supported file extractor found for file: " + aFile.getId() + " " + aFile.getName()); } //logger.log(Level.INFO, "Extractor: " + fileExtract + ", file: " + aFile.getName()); //divide into chunks and index return fileExtract.index(aFile); } private boolean isTextExtractSupported(AbstractFile aFile) { for (AbstractFileExtract extractor : textExtractors) { if (extractor.isContentTypeSpecific() == true && extractor.isSupported(aFile)) { return true; } } return false; } private void indexFile(AbstractFile aFile, boolean indexContent) { //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName()); FsContent fsContent = null; //check if alloc fs file or dir TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); if (aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { //skip indexing of virtual dirs (no content, no real name) - will index children files return; } else if (aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) { fsContent = (FsContent) aFile; } final long size = aFile.getSize(); //if alloc fs file and not to index content, or a dir, or 0 content, index meta data only if (fsContent != null && (indexContent == false || fsContent.isDir() || size == 0)) { try { ingester.ingest(fsContent, false); //meta-data only ingestStatus.put(aFile.getId(), IngestStatus.INGESTED_META); } catch (IngesterException ex) { ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); logger.log(Level.WARNING, "Unable to index meta-data for fsContent: " + fsContent.getId(), ex); } return; } boolean extractTextSupported = 
isTextExtractSupported(aFile); if (fsContent != null && extractTextSupported) { //we know it's an allocated FS file (since it's FsContent) //extract text with one of the extractors, divide into chunks and index with Solr try { //logger.log(Level.INFO, "indexing: " + fsContent.getName()); if (!extractIndex(aFile, false)) { logger.log(Level.WARNING, "Failed to extract Tika text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); //try to extract strings, if a file if (fsContent.isFile() == true) { processNonIngestible(fsContent); } } else { ingestStatus.put(aFile.getId(), IngestStatus.INGESTED); } } catch (IngesterException e) { logger.log(Level.INFO, "Could not extract text with Tika, " + fsContent.getId() + ", " + fsContent.getName(), e); ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED); //try to extract strings, if a file if (fsContent.isFile() == true) { processNonIngestible(fsContent); } } catch (Exception e) { logger.log(Level.WARNING, "Error extracting text with Tika, " + fsContent.getId() + ", " + fsContent.getName(), e); ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED); //try to extract strings if a file if (fsContent.isFile() == true) { processNonIngestible(fsContent); } } } else { //unallocated file or unsupported content type by Solr processNonIngestible(aFile); } } private boolean processNonIngestible(AbstractFile aFile) { try { if (!extractIndex(aFile, true)) { logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); return false; } else { ingestStatus.put(aFile.getId(), IngestStatus.EXTRACTED_INGESTED); return true; } } catch (IngesterException ex) { logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED); 
return false; } } } /** * Searcher responsible for searching the current index and writing results * to blackboard and the inbox. Also, posts results to listeners as Ingest * data events. Searches entire index, and keeps track of only new results * to report and save. Runs as a background thread. */ private class Searcher extends SwingWorker<Object, Void> { private List<Keyword> keywords; private ProgressHandle progress; private final Logger logger = Logger.getLogger(Searcher.class.getName()); private boolean finalRun = false; Searcher(List<Keyword> keywords) { this.keywords = keywords; } Searcher(List<Keyword> keywords, boolean finalRun) { this(keywords); this.finalRun = finalRun; } @Override protected Object doInBackground() throws Exception { logger.log(Level.INFO, "Pending start of new searcher"); final String displayName = "Keyword Search" + (finalRun ? " - Finalizing" : ""); progress = ProgressHandleFactory.createHandle(displayName + (" (Pending)"), new Cancellable() { @Override public boolean cancel() { logger.log(Level.INFO, "Cancelling the searcher by user."); if (progress != null) { progress.setDisplayName(displayName + " (Cancelling...)"); } return Searcher.this.cancel(true); } }); progress.start(); progress.switchToIndeterminate(); //block to ensure previous searcher is completely done with doInBackground() //even after previous searcher cancellation, we need to check this searcherLock.lock(); final StopWatch stopWatch = new StopWatch(); stopWatch.start(); try { logger.log(Level.INFO, "Started a new searcher"); progress.setDisplayName(displayName); //make sure other searchers are not spawned searcherDone = false; runSearcher = false; if (searchTimer.isRunning()) { searchTimer.stop(); } int numSearched = 0; updateKeywords(); progress.switchToDeterminate(keywords.size()); for (Keyword keywordQuery : keywords) { if (this.isCancelled()) { logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); return 
null; } final String queryStr = keywordQuery.getQuery(); final KeywordSearchList list = keywordToList.get(queryStr); final String listName = list.getName(); //DEBUG //logger.log(Level.INFO, "Searching: " + queryStr); progress.progress(queryStr, numSearched); KeywordSearchQuery del = null; boolean isRegex = !keywordQuery.isLiteral(); if (!isRegex) { del = new LuceneQuery(keywordQuery); del.escape(); } else { del = new TermComponentQuery(keywordQuery); } Map<String, List<ContentHit>> queryResult = null; try { queryResult = del.performQuery(); } catch (NoOpenCoreException ex) { logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex); //no reason to continue with next query if recovery failed //or wait for recovery to kick in and run again later //likely case has closed and threads are being interrupted return null; } catch (CancellationException e) { logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery()); return null; } catch (Exception e) { logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); continue; } //calculate new results but substracting results already obtained in this ingest Map<Keyword, List<ContentHit>> newResults = filterResults(queryResult, isRegex); if (!newResults.isEmpty()) { //write results to BB //new artifacts created, to report to listeners Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>(); for (final Keyword hitTerm : newResults.keySet()) { List<ContentHit> contentHitsAll = newResults.get(hitTerm); Map<AbstractFile, Integer> contentHitsFlattened = ContentHit.flattenResults(contentHitsAll); for (final AbstractFile hitFile : contentHitsFlattened.keySet()) { String snippet = null; final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery()); int chunkId = contentHitsFlattened.get(hitFile); try { snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true); } catch 
(NoOpenCoreException e) { logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //no reason to continue return null; } catch (Exception e) { logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); continue; } KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName); if (written == null) { logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString()); continue; } newArtifacts.add(written.getArtifact()); //generate a data message for each artifact StringBuilder subjectSb = new StringBuilder(); StringBuilder detailsSb = new StringBuilder(); //final int hitFiles = newResults.size(); if (!keywordQuery.isLiteral()) { subjectSb.append("RegExp hit: "); } else { subjectSb.append("Keyword hit: "); } //subjectSb.append("<"); String uniqueKey = null; BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()); if (attr != null) { final String keyword = attr.getValueString(); subjectSb.append(keyword); uniqueKey = keyword.toLowerCase(); } //subjectSb.append(">"); //String uniqueKey = queryStr; //details detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //hit detailsSb.append("<tr>"); detailsSb.append("<th>Keyword hit</th>"); detailsSb.append("<td>").append(StringEscapeUtils.escapeHtml(attr.getValueString())).append("</td>"); detailsSb.append("</tr>"); //preview attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID()); if (attr != null) { detailsSb.append("<tr>"); detailsSb.append("<th>Preview</th>"); detailsSb.append("<td>").append(StringEscapeUtils.escapeHtml(attr.getValueString())).append("</td>"); detailsSb.append("</tr>"); } //file detailsSb.append("<tr>"); detailsSb.append("<th>File</th>"); if (hitFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) { detailsSb.append("<td>").append(((FsContent) 
hitFile).getParentPath()).append(hitFile.getName()).append("</td>"); } else { detailsSb.append("<td>").append(hitFile.getName()).append("</td>"); } detailsSb.append("</tr>"); //list attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); detailsSb.append("<tr>"); detailsSb.append("<th>List</th>"); detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); detailsSb.append("</tr>"); //regex if (!keywordQuery.isLiteral()) { attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()); if (attr != null) { detailsSb.append("<tr>"); detailsSb.append("<th>RegEx</th>"); detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); detailsSb.append("</tr>"); } } detailsSb.append("</table>"); //check if should send messages on hits on this list if (list.getIngestMessages()) //post ingest inbox msg { services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact())); } } //for each term hit }//for each file hit //update artifact browser if (!newArtifacts.isEmpty()) { services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts)); } } progress.progress(queryStr, ++numSearched); } } //end try block catch (Exception ex) { logger.log(Level.WARNING, "searcher exception occurred", ex); } finally { finalizeSearcher(); stopWatch.stop(); logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs."); searcherLock.unlock(); } return null; } /** * Retrieve the updated keyword search lists from the XML loader */ private void updateKeywords() { KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); keywords.clear(); keywordToList.clear(); for (String name : keywordLists) { KeywordSearchList list = loader.getList(name); for (Keyword k : list.getKeywords()) { keywords.add(k); keywordToList.put(k.getQuery(), list); } } } 
//perform all essential cleanup that needs to be done right AFTER doInBackground() returns //without relying on done() method that is not guaranteed to run after background thread completes //NEED to call this method always right before doInBackground() returns /** * Performs the cleanup that needs to be done right AFTER * doInBackground() returns without relying on done() method that is not * guaranteed to run after background thread completes REQUIRED to call * this method always right before doInBackground() returns */ private void finalizeSearcher() { logger.log(Level.INFO, "Searcher finalizing"); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { progress.finish(); } }); searcherDone = true; //next currentSearcher can start if (finalRun) { //this is the final searcher logger.log(Level.INFO, "The final searcher in this ingest done."); finalSearcherDone = true; keywords.clear(); keywordLists.clear(); keywordToList.clear(); //reset current resuls earlier to potentially garbage collect sooner currentResults = new HashMap<Keyword, List<Long>>(); services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, KeywordSearchIngestModule.instance, "Completed")); } else { //start counting time for a new searcher to start //unless final searcher is pending if (finalSearcher != null) { searchTimer.start(); } } } //calculate new results but substracting results already obtained in this ingest //update currentResults map with the new results private Map<Keyword, List<ContentHit>> filterResults(Map<String, List<ContentHit>> queryResult, boolean isRegex) { Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>(); for (String termResult : queryResult.keySet()) { List<ContentHit> queryTermResults = queryResult.get(termResult); //translate to list of IDs that we keep track of List<Long> queryTermResultsIDs = new ArrayList<Long>(); for (ContentHit ch : queryTermResults) { queryTermResultsIDs.add(ch.getId()); } 
Keyword termResultK = new Keyword(termResult, !isRegex); List<Long> curTermResults = currentResults.get(termResultK); if (curTermResults == null) { currentResults.put(termResultK, queryTermResultsIDs); newResults.put(termResultK, queryTermResults); } else { //some AbstractFile hits already exist for this keyword for (ContentHit res : queryTermResults) { if (!curTermResults.contains(res.getId())) { //add to new results List<ContentHit> newResultsFs = newResults.get(termResultK); if (newResultsFs == null) { newResultsFs = new ArrayList<ContentHit>(); newResults.put(termResultK, newResultsFs); } newResultsFs.add(res); curTermResults.add(res.getId()); } } } } return newResults; } } /** * Set the skip known files setting on the module * * @param skip true if skip, otherwise, will process known files as well, as * reported by HashDB module */ void setSkipKnown(boolean skip) { this.skipKnown = skip; } boolean getSkipKnown() { return skipKnown; } /** * Set the scripts to use for string extraction. 
Takes effect on next ingest * start / at init(), not in effect if ingest is running * * @param scripts scripts to use for string extraction next time ingest * inits and runs */ void setStringExtractScripts(List<SCRIPT> scripts) { this.stringExtractScripts.clear(); this.stringExtractScripts.addAll(scripts); } /** * gets the currently set scripts to use * * @return the list of currently used script */ List<SCRIPT> getStringExtractScripts() { return new ArrayList<SCRIPT>(this.stringExtractScripts); } /** * Set / override string extract option * @param key option name to set * @param val option value to set */ void setStringExtractOption(String key, String val) { this.stringExtractOptions.put(key, val); } /** * get string extract option for the key * @param key option name * @return option string value, or empty string if the option is not set */ String getStringExtractOption(String key) { if (this.stringExtractOptions.containsKey(key)) { return this.stringExtractOptions.get(key); } else { return ""; } } }
make default string extract script in ingest to be Latin basic only, not also extended.
KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
make default string extract script in ingest to be Latin basic only, not also extended.
Java
apache-2.0
601605b9a9c9580a5f276854e5d96d21ab2c1e20
0
adufilie/flex-falcon,greg-dove/flex-falcon,greg-dove/flex-falcon,greg-dove/flex-falcon,adufilie/flex-falcon,adufilie/flex-falcon,adufilie/flex-falcon,greg-dove/flex-falcon
/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package org.apache.flex.compiler.internal.codegen.js.goog;

import java.io.FilterWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flex.compiler.codegen.IASGlobalFunctionConstants.BuiltinType;
import org.apache.flex.compiler.codegen.IDocEmitter;
import org.apache.flex.compiler.codegen.js.goog.IJSGoogDocEmitter;
import org.apache.flex.compiler.codegen.js.goog.IJSGoogEmitter;
import org.apache.flex.compiler.common.ASModifier;
import org.apache.flex.compiler.common.ModifiersSet;
import org.apache.flex.compiler.constants.IASLanguageConstants;
import org.apache.flex.compiler.definitions.IClassDefinition;
import org.apache.flex.compiler.definitions.IDefinition;
import org.apache.flex.compiler.definitions.IFunctionDefinition;
import org.apache.flex.compiler.definitions.IPackageDefinition;
import org.apache.flex.compiler.definitions.ITypeDefinition;
import org.apache.flex.compiler.internal.codegen.as.ASEmitterTokens;
import org.apache.flex.compiler.internal.codegen.js.JSEmitter;
import org.apache.flex.compiler.internal.codegen.js.JSEmitterTokens;
import org.apache.flex.compiler.internal.definitions.ClassDefinition;
import org.apache.flex.compiler.internal.scopes.PackageScope;
import org.apache.flex.compiler.internal.tree.as.ChainedVariableNode;
import org.apache.flex.compiler.internal.tree.as.FunctionCallNode;
import org.apache.flex.compiler.internal.tree.as.FunctionNode;
import org.apache.flex.compiler.problems.ICompilerProblem;
import org.apache.flex.compiler.projects.ICompilerProject;
import org.apache.flex.compiler.scopes.IASScope;
import org.apache.flex.compiler.tree.ASTNodeID;
import org.apache.flex.compiler.tree.as.IASNode;
import org.apache.flex.compiler.tree.as.IAccessorNode;
import org.apache.flex.compiler.tree.as.IBinaryOperatorNode;
import org.apache.flex.compiler.tree.as.IClassNode;
import org.apache.flex.compiler.tree.as.IContainerNode;
import org.apache.flex.compiler.tree.as.IDefinitionNode;
import org.apache.flex.compiler.tree.as.IEmbedNode;
import org.apache.flex.compiler.tree.as.IExpressionNode;
import org.apache.flex.compiler.tree.as.IForLoopNode;
import org.apache.flex.compiler.tree.as.IFunctionCallNode;
import org.apache.flex.compiler.tree.as.IFunctionNode;
import org.apache.flex.compiler.tree.as.IGetterNode;
import org.apache.flex.compiler.tree.as.IIdentifierNode;
import org.apache.flex.compiler.tree.as.IInterfaceNode;
import org.apache.flex.compiler.tree.as.INamespaceAccessExpressionNode;
import org.apache.flex.compiler.tree.as.IParameterNode;
import org.apache.flex.compiler.tree.as.IScopedNode;
import org.apache.flex.compiler.tree.as.ISetterNode;
import org.apache.flex.compiler.tree.as.ITypeNode;
import org.apache.flex.compiler.tree.as.ITypedExpressionNode;
import org.apache.flex.compiler.tree.as.IVariableExpressionNode;
import org.apache.flex.compiler.tree.as.IVariableNode;
import org.apache.flex.compiler.utils.ASNodeUtils;

/**
 * Concrete implementation of the 'goog' JavaScript production.
 * <p>
 * Walks an ActionScript AST and emits Google Closure Library style
 * JavaScript: {@code goog.provide}/{@code goog.require} headers,
 * prototype-based class members, {@code goog.inherits} for the class
 * hierarchy, and {@code goog.base} for super calls.
 *
 * @author Michael Schmalle
 * @author Erik de Bruin
 */
public class JSGoogEmitter extends JSEmitter implements IJSGoogEmitter
{
    // Marker strings passed to emitSuperCall() to select the emission mode
    // (compared by reference identity, so callers must pass these constants).
    protected static final String CONSTRUCTOR_EMPTY = "emptyConstructor";
    protected static final String CONSTRUCTOR_FULL = "fullConstructor";
    protected static final String SUPER_FUNCTION_CALL = "replaceSuperFunction";

    // Qualified names of accessor properties already emitted, used to avoid
    // emitting the same Object.defineProperty backing field twice for a
    // get/set pair.
    protected List<String> propertyNames = new ArrayList<String>();

    protected ICompilerProject project;

    // Convenience accessor; getDocEmitter() creates a new doc emitter on
    // every call (see below), so this does too.
    protected IJSGoogDocEmitter getDoc()
    {
        return (IJSGoogDocEmitter) getDocEmitter();
    }

    @Override
    public IDocEmitter getDocEmitter()
    {
        // NOTE(review): returns a fresh instance per call rather than
        // caching one — presumably intentional, but worth confirming.
        return new JSGoogDocEmitter(this);
    }

    //--------------------------------------------------------------------------
    //
    //--------------------------------------------------------------------------

    /**
     * Emits the {@code goog.provide('<qualified.name>');} header for the
     * single type contained in the package, followed by a blank line.
     * Emits nothing when the package contains no type.
     */
    @Override
    public void emitPackageHeader(IPackageDefinition definition)
    {
        IASScope containedScope = definition.getContainedScope();
        ITypeDefinition type = findType(containedScope.getAllLocalDefinitions());
        if (type == null)
            return;

        /* goog.provide('x');\n\n */
        write(JSGoogEmitterTokens.GOOG_PROVIDE);
        write(ASEmitterTokens.PAREN_OPEN);
        write(ASEmitterTokens.SINGLE_QUOTE);
        write(type.getQualifiedName());
        write(ASEmitterTokens.SINGLE_QUOTE);
        write(ASEmitterTokens.PAREN_CLOSE);
        writeNewline(ASEmitterTokens.SEMICOLON);
        writeNewline();
    }

    /**
     * Emits one {@code goog.require('<import>');} line per resolved import
     * of the package's type. Imports containing the AS3 token are skipped.
     */
    @Override
    public void emitPackageHeaderContents(IPackageDefinition definition)
    {
        PackageScope containedScope = (PackageScope) definition
                .getContainedScope();

        ITypeDefinition type = findType(containedScope.getAllLocalDefinitions());
        if (type == null)
            return;

        List<String> list = resolveImports(type);
        for (String imp : list)
        {
            if (imp.indexOf(JSGoogEmitterTokens.AS3.getToken()) != -1)
                continue;

            /* goog.require('x');\n */
            write(JSGoogEmitterTokens.GOOG_REQUIRE);
            write(ASEmitterTokens.PAREN_OPEN);
            write(ASEmitterTokens.SINGLE_QUOTE);
            write(imp);
            write(ASEmitterTokens.SINGLE_QUOTE);
            write(ASEmitterTokens.PAREN_CLOSE);
            writeNewline(ASEmitterTokens.SEMICOLON);
        }

        // (erikdebruin) only write 'closing' line break when there are
        //               actually imports...
        if (list.size() > 1
                || (list.size() == 1 && list.get(0).indexOf(
                        JSGoogEmitterTokens.AS3.getToken()) == -1))
        {
            writeNewline();
        }
    }

    /**
     * Walks the single type node (class or interface) found inside the
     * package, which drives emitClass()/emitInterface() below.
     */
    @Override
    public void emitPackageContents(IPackageDefinition definition)
    {
        IASScope containedScope = definition.getContainedScope();
        ITypeDefinition type = findType(containedScope.getAllLocalDefinitions());
        if (type == null)
            return;

        ITypeNode tnode = findTypeNode(definition.getNode());
        if (tnode != null)
        {
            getWalker().walk(tnode); // IClassNode | IInterfaceNode
        }
    }

    /** No package footer is emitted in the goog production. */
    @Override
    public void emitPackageFooter(IPackageDefinition definition)
    {
    }

    //--------------------------------------------------------------------------
    //
    //--------------------------------------------------------------------------

    /**
     * Emits a class: the constructor function first (a synthesized empty
     * {@code Name = function() {}} when the class has a constructor
     * definition but no constructor node), then every member — fields,
     * methods, and get/set accessors — each terminated with a semicolon.
     */
    @Override
    public void emitClass(IClassNode node)
    {
        IClassDefinition definition = node.getDefinition();

        IFunctionDefinition ctorDefinition = definition.getConstructor();

        // Static-only (Singleton) classes may not have a constructor
        if (ctorDefinition != null)
        {
            IFunctionNode ctorNode = (IFunctionNode) ctorDefinition.getNode();
            if (ctorNode != null)
            {
                // constructor
                emitMethod(ctorNode);
                write(ASEmitterTokens.SEMICOLON);
            }
            else
            {
                // No explicit constructor in the source: synthesize
                // "qname = function() {\n};"
                String qname = definition.getQualifiedName();
                if (qname != null && !qname.equals(""))
                {
                    write(qname);
                    write(ASEmitterTokens.SPACE);
                    writeToken(ASEmitterTokens.EQUAL);
                    write(ASEmitterTokens.FUNCTION);
                    write(ASEmitterTokens.PAREN_OPEN);
                    write(ASEmitterTokens.PAREN_CLOSE);
                    write(ASEmitterTokens.SPACE);
                    write(ASEmitterTokens.BLOCK_OPEN);
                    writeNewline();
                    write(ASEmitterTokens.BLOCK_CLOSE);
                    write(ASEmitterTokens.SEMICOLON);
                }
            }
        }

        IDefinitionNode[] dnodes = node.getAllMemberNodes();
        for (IDefinitionNode dnode : dnodes)
        {
            if (dnode.getNodeID() == ASTNodeID.VariableID)
            {
                writeNewline();
                writeNewline();
                emitField((IVariableNode) dnode);
                write(ASEmitterTokens.SEMICOLON);
            }
            else if (dnode.getNodeID() == ASTNodeID.FunctionID)
            {
                if (!((IFunctionNode) dnode).isConstructor())
                {
                    writeNewline();
                    writeNewline();
                    emitMethod((IFunctionNode) dnode);
                    write(ASEmitterTokens.SEMICOLON);
                }
            }
            else if (dnode.getNodeID() == ASTNodeID.GetterID
                    || dnode.getNodeID() == ASTNodeID.SetterID)
            {
                writeNewline();
                writeNewline();
                emitAccessors((IAccessorNode) dnode);
                write(ASEmitterTokens.SEMICOLON);
            }
        }
    }

    /**
     * Emits an interface as an empty constructor function plus one empty
     * prototype member per declared method. A get/set accessor pair is
     * collapsed to a single prototype slot via the propertyNames guard.
     */
    @Override
    public void emitInterface(IInterfaceNode node)
    {
        ICompilerProject project = getWalker().getProject();

        getDoc().emitInterfaceDoc(node, project);

        String qname = node.getQualifiedName();
        if (qname != null && !qname.equals(""))
        {
            write(qname);
            write(ASEmitterTokens.SPACE);
            writeToken(ASEmitterTokens.EQUAL);
            write(ASEmitterTokens.FUNCTION);
            write(ASEmitterTokens.PAREN_OPEN);
            write(ASEmitterTokens.PAREN_CLOSE);
            write(ASEmitterTokens.SPACE);
            write(ASEmitterTokens.BLOCK_OPEN);
            writeNewline();
            write(ASEmitterTokens.BLOCK_CLOSE);
            write(ASEmitterTokens.SEMICOLON);
        }

        final IDefinitionNode[] members = node.getAllMemberDefinitionNodes();
        for (IDefinitionNode mnode : members)
        {
            boolean isAccessor = mnode.getNodeID() == ASTNodeID.GetterID
                    || mnode.getNodeID() == ASTNodeID.SetterID;

            if (!isAccessor || !propertyNames.contains(qname))
            {
                writeNewline();

                write(qname);
                write(ASEmitterTokens.MEMBER_ACCESS);
                write(JSEmitterTokens.PROTOTYPE);
                write(ASEmitterTokens.MEMBER_ACCESS);
                write(mnode.getQualifiedName());

                if (isAccessor && !propertyNames.contains(qname))
                {
                    // First accessor of a pair: record it; no function body
                    // is emitted for the property slot itself.
                    propertyNames.add(qname);
                }
                else
                {
                    write(ASEmitterTokens.SPACE);
                    writeToken(ASEmitterTokens.EQUAL);
                    write(ASEmitterTokens.FUNCTION);

                    emitParameters(((IFunctionNode) mnode).getParameterNodes());

                    write(ASEmitterTokens.SPACE);
                    write(ASEmitterTokens.BLOCK_OPEN);
                    writeNewline();
                    write(ASEmitterTokens.BLOCK_CLOSE);
                }

                write(ASEmitterTokens.SEMICOLON);
            }
        }
    }

    /**
     * Emits a class field as {@code Qname.prototype.name = value} (the
     * {@code prototype.} segment is omitted for static fields), followed by
     * any chained variable declarations on the same statement.
     */
    @Override
    public void emitField(IVariableNode node)
    {
        IClassDefinition definition = getClassDefinition(node);

        IDefinition def = null;
        IExpressionNode enode = node.getVariableTypeNode();//getAssignedValueNode();
        if (enode != null)
            def = enode.resolveType(getWalker().getProject());

        getDoc().emitFieldDoc(node, def);

        /* x.prototype.y = z */

        ModifiersSet modifierSet = node.getDefinition().getModifiers();
        String root = "";
        if (modifierSet != null && !modifierSet.hasModifier(ASModifier.STATIC))
        {
            root = JSEmitterTokens.PROTOTYPE.getToken();
            root += ASEmitterTokens.MEMBER_ACCESS.getToken();
        }
        write(definition.getQualifiedName()
                + ASEmitterTokens.MEMBER_ACCESS.getToken() + root
                + node.getName());

        IExpressionNode vnode = node.getAssignedValueNode();
        if (vnode != null)
        {
            write(ASEmitterTokens.SPACE);
            writeToken(ASEmitterTokens.EQUAL);
            getWalker().walk(vnode);
        }

        if (!(node instanceof ChainedVariableNode))
        {
            // Emit any chained declarations (var a = 1, b = 2) as separate
            // field statements.
            int len = node.getChildCount();
            for (int i = 0; i < len; i++)
            {
                IASNode child = node.getChild(i);
                if (child instanceof ChainedVariableNode)
                {
                    writeNewline(ASEmitterTokens.SEMICOLON);
                    writeNewline();
                    emitField((IVariableNode) child);
                }
            }
        }
    }

    /**
     * Emits a local variable declaration with its doc comment and assigned
     * value. Embed initializers are skipped; chained declarations are
     * emitted comma-separated.
     */
    @Override
    public void emitVarDeclaration(IVariableNode node)
    {
        if (!(node instanceof ChainedVariableNode) && !node.isConst())
        {
            emitMemberKeyword(node);
        }

        IExpressionNode avnode = node.getAssignedValueNode();
        if (avnode != null)
        {
            IDefinition def = avnode.resolveType(getWalker().getProject());

            String opcode = avnode.getNodeID().getParaphrase();
            // NOTE(review): interned-string comparison (!=) against the
            // paraphrase literal — relies on String interning.
            if (opcode != "AnonymousFunction")
                getDoc().emitVarDoc(node, def);
        }
        else
        {
            getDoc().emitVarDoc(node, null);
        }

        emitDeclarationName(node);
        if (!(avnode instanceof IEmbedNode))
            emitAssignedValue(avnode);

        if (!(node instanceof ChainedVariableNode))
        {
            // check for chained variables
            int len = node.getChildCount();
            for (int i = 0; i < len; i++)
            {
                IASNode child = node.getChild(i);
                if (child instanceof ChainedVariableNode)
                {
                    writeToken(ASEmitterTokens.COMMA);
                    emitVarDeclaration((IVariableNode) child);
                }
            }
        }
    }

    /** Getters are emitted via Object.defineProperty; see emitObjectDefineProperty(). */
    @Override
    public void emitGetAccessor(IGetterNode node)
    {
        emitObjectDefineProperty(node);
    }

    /** Setters are emitted via Object.defineProperty; see emitObjectDefineProperty(). */
    @Override
    public void emitSetAccessor(ISetterNode node)
    {
        emitObjectDefineProperty(node);
    }

    /**
     * Emits an accessor: on first sight of the qualified name a backing
     * field is emitted (guarded by propertyNames so a get/set pair shares
     * one), then the getter or setter itself.
     */
    protected void emitAccessors(IAccessorNode node)
    {
        String qname = node.getQualifiedName();
        if (!propertyNames.contains(qname))
        {
            emitField(node);
            write(ASEmitterTokens.SEMICOLON);
            writeNewline();
            writeNewline();

            propertyNames.add(qname);
        }

        if (node.getNodeID() == ASTNodeID.GetterID)
        {
            emitGetAccessor((IGetterNode) node);
        }
        else if (node.getNodeID() == ASTNodeID.SetterID)
        {
            emitSetAccessor((ISetterNode) node);
        }
    }

    /**
     * Emits a method (or constructor) as a function assigned to the class
     * (static), the prototype (instance), or the qualified name itself
     * (constructor). A constructor of a subclass is followed by a
     * {@code goog.inherits(Sub, Super)} call.
     */
    @Override
    public void emitMethod(IFunctionNode node)
    {
        FunctionNode fn = (FunctionNode) node;
        fn.parseFunctionBody(new ArrayList<ICompilerProblem>());

        ICompilerProject project = getWalker().getProject();

        getDoc().emitMethodDoc(node, project);

        boolean isConstructor = node.isConstructor();

        String qname = getTypeDefinition(node).getQualifiedName();
        if (qname != null && !qname.equals(""))
        {
            write(qname);
            if (!isConstructor)
            {
                write(ASEmitterTokens.MEMBER_ACCESS);
                if (!fn.hasModifier(ASModifier.STATIC))
                {
                    write(JSEmitterTokens.PROTOTYPE);
                    write(ASEmitterTokens.MEMBER_ACCESS);
                }
            }
        }

        if (!isConstructor)
            emitMemberName(node);

        write(ASEmitterTokens.SPACE);
        writeToken(ASEmitterTokens.EQUAL);
        write(ASEmitterTokens.FUNCTION);

        emitParameters(node.getParameterNodes());

        boolean hasSuperClass = hasSuperClass(node);

        if (isConstructor && node.getScopedNode().getChildCount() == 0)
        {
            // Empty constructor body: still emit the (empty) super call for
            // subclasses.
            write(ASEmitterTokens.SPACE);
            write(ASEmitterTokens.BLOCK_OPEN);
            if (hasSuperClass)
                emitSuperCall(node, CONSTRUCTOR_EMPTY);
            writeNewline();
            write(ASEmitterTokens.BLOCK_CLOSE);
        }

        if (!isConstructor || node.getScopedNode().getChildCount() > 0)
            emitMethodScope(node.getScopedNode());

        if (isConstructor && hasSuperClass)
        {
            writeNewline();
            write(JSGoogEmitterTokens.GOOG_INHERITS);
            write(ASEmitterTokens.PAREN_OPEN);
            write(qname);
            writeToken(ASEmitterTokens.COMMA);
            String sname = getSuperClassDefinition(node, project)
                    .getQualifiedName();
            write(sname);
            write(ASEmitterTokens.PAREN_CLOSE);
        }
    }

    /**
     * Emits a function call. {@code super(...)}/{@code super.foo(...)} is
     * rerouted through emitSuperCall(); everything else becomes a plain
     * (optionally {@code new}-prefixed) call expression.
     */
    @Override
    public void emitFunctionCall(IFunctionCallNode node)
    {
        IASNode cnode = node.getChild(0);

        if (cnode.getNodeID() == ASTNodeID.MemberAccessExpressionID)
            cnode = cnode.getChild(0);

        ASTNodeID id = cnode.getNodeID();
        if (id != ASTNodeID.SuperID)
        {
            if (node.isNewExpression())
            {
                writeToken(ASEmitterTokens.NEW);
            }

            getWalker().walk(node.getNameNode());

            write(ASEmitterTokens.PAREN_OPEN);
            walkArguments(node.getArgumentNodes());
            write(ASEmitterTokens.PAREN_CLOSE);
        }
        else
        {
            emitSuperCall(node, SUPER_FUNCTION_CALL);
        }
    }

    /**
     * Emits an identifier, prefixing it with {@code self.} when it resolves
     * to a member of the enclosing class (implicit {@code this} access), or
     * with its package name when it refers to a type from another package.
     */
    @Override
    public void emitIdentifier(IIdentifierNode node)
    {
        ICompilerProject project = getWalker().getProject();

        IClassNode cnode = (IClassNode) node
                .getAncestorOfType(IClassNode.class);

        IDefinition def = ((IIdentifierNode) node).resolve(project);

        ITypeDefinition type = ((IIdentifierNode) node).resolveType(project);

        IASNode pnode = node.getParent();
        ASTNodeID inode = pnode.getNodeID();

        // Decide whether the identifier is an implicit member access that
        // needs the "self." prefix (see emitSelfReference()).
        boolean writeSelf = false;
        if (cnode != null)
        {
            IDefinitionNode[] members = cnode.getAllMemberNodes();
            for (IDefinitionNode mnode : members)
            {
                if ((type != null && type.getQualifiedName().equalsIgnoreCase(
                        IASLanguageConstants.Function))
                        || (def != null && def.getQualifiedName()
                                .equalsIgnoreCase(mnode.getQualifiedName())))
                {
                    if (!(pnode instanceof FunctionNode)
                            && inode != ASTNodeID.MemberAccessExpressionID)
                    {
                        writeSelf = true;
                        break;
                    }
                    else if (inode == ASTNodeID.MemberAccessExpressionID
                            && !def.isStatic())
                    {
                        String tname = type.getQualifiedName();
                        writeSelf = !tname.equalsIgnoreCase(cnode
                                .getQualifiedName())
                                && !tname.equals(IASLanguageConstants.Function);
                        break;
                    }
                }
            }
        }

        // Special-cased test fixture class name; suppresses the self prefix
        // when running the emitter's own unit tests.
        boolean isRunningInTestMode = cnode != null
                && cnode.getQualifiedName().equalsIgnoreCase("FalconTest_A");
        if (writeSelf && !isRunningInTestMode)
        {
            write(JSGoogEmitterTokens.SELF);
            write(ASEmitterTokens.MEMBER_ACCESS);
        }
        else
        {
            String pname = (type != null) ? type.getPackageName() : "";
            // NOTE(review): interned-string comparison (pname != "").
            if (cnode != null
                    && pname != ""
                    && !pname.equalsIgnoreCase(cnode.getPackageName())
                    && inode != ASTNodeID.ArgumentID
                    && inode != ASTNodeID.VariableID
                    && inode != ASTNodeID.TypedExpressionID)
            {
                write(pname);
                write(ASEmitterTokens.MEMBER_ACCESS);
            }
        }

        super.emitIdentifier(node);
    }

    /**
     * Emits the boilerplate at the top of a function body: the
     * {@code var self = this;} alias (non-static, non-local functions with
     * a body), an implicit super call for constructors that lack one, and
     * the rest-/default-parameter prologues.
     */
    @Override
    public void emitFunctionBlockHeader(IFunctionNode node)
    {
        IDefinition def = node.getDefinition();
        boolean isStatic = false;
        if (def != null && def.isStatic())
            isStatic = true;
        boolean isLocal = false;
        if (node.getFunctionClassification() == IFunctionDefinition.FunctionClassification.LOCAL)
            isLocal = true;
        if (hasBody(node) && !isStatic && !isLocal)
            emitSelfReference(node);

        if (node.isConstructor() && hasSuperClass(node)
                && !hasSuperCall(node.getScopedNode()))
            emitSuperCall(node, CONSTRUCTOR_FULL);

        emitRestParameterCodeBlock(node);

        emitDefaultParameterCodeBlock(node);
    }

    /** Emits {@code var self = this;} — the alias used by emitIdentifier(). */
    protected void emitSelfReference(IFunctionNode node)
    {
        writeToken(ASEmitterTokens.VAR);
        writeToken(JSGoogEmitterTokens.SELF);
        writeToken(ASEmitterTokens.EQUAL);
        write(ASEmitterTokens.THIS);
        writeNewline(ASEmitterTokens.SEMICOLON);
    }

    /**
     * Emits a {@code Qname.base(this, ...)} call ({@code goog.base} style).
     *
     * @param node either the IFunctionNode of a constructor (implicit super
     *             call) or the IFunctionCallNode of an explicit super call.
     * @param type one of CONSTRUCTOR_EMPTY, CONSTRUCTOR_FULL or
     *             SUPER_FUNCTION_CALL — compared by identity, controls
     *             surrounding whitespace/semicolons and argument source.
     */
    protected void emitSuperCall(IASNode node, String type)
    {
        IFunctionNode fnode = (node instanceof IFunctionNode) ? (IFunctionNode) node
                : null;
        IFunctionCallNode fcnode = (node instanceof IFunctionCallNode) ? (FunctionCallNode) node
                : null;

        if (type == CONSTRUCTOR_EMPTY)
        {
            indentPush();
            writeNewline();
            indentPop();
        }
        else if (type == SUPER_FUNCTION_CALL)
        {
            if (fnode == null)
                fnode = (IFunctionNode) fcnode
                        .getAncestorOfType(IFunctionNode.class);
        }

        if (fnode.isConstructor() && !hasSuperClass(fnode))
            return;

        IClassNode cnode = (IClassNode) node
                .getAncestorOfType(IClassNode.class);

        write(cnode.getQualifiedName());
        write(ASEmitterTokens.MEMBER_ACCESS);
        write(JSGoogEmitterTokens.GOOG_BASE);
        write(ASEmitterTokens.PAREN_OPEN);
        write(ASEmitterTokens.THIS);

        if (fnode.isConstructor())
        {
            writeToken(ASEmitterTokens.COMMA);
            write(ASEmitterTokens.SINGLE_QUOTE);
            write(JSGoogEmitterTokens.GOOG_CONSTRUCTOR);
            write(ASEmitterTokens.SINGLE_QUOTE);
        }

        if (fnode != null && !fnode.isConstructor())
        {
            writeToken(ASEmitterTokens.COMMA);
            write(ASEmitterTokens.SINGLE_QUOTE);
            write(fnode.getName());
            write(ASEmitterTokens.SINGLE_QUOTE);
        }

        // Forward arguments: from the call site for explicit super calls,
        // from the constructor's own parameters for implicit ones.
        IASNode[] anodes = null;
        boolean writeArguments = false;
        if (fcnode != null)
        {
            anodes = fcnode.getArgumentNodes();

            writeArguments = anodes.length > 0;
        }
        else if (fnode.isConstructor())
        {
            anodes = fnode.getParameterNodes();

            writeArguments = (anodes != null && anodes.length > 0);
        }

        if (writeArguments)
        {
            int len = anodes.length;
            for (int i = 0; i < len; i++)
            {
                writeToken(ASEmitterTokens.COMMA);

                getWalker().walk(anodes[i]);
            }
        }

        write(ASEmitterTokens.PAREN_CLOSE);

        if (type == CONSTRUCTOR_FULL)
        {
            write(ASEmitterTokens.SEMICOLON);
            writeNewline();
        }
        else if (type == CONSTRUCTOR_EMPTY)
        {
            write(ASEmitterTokens.SEMICOLON);
        }
    }

    /**
     * For each parameter with a default value, emits a prologue line of the
     * form {@code x = typeof x !== 'undefined' ? x : <default>;}.
     */
    protected void emitDefaultParameterCodeBlock(IFunctionNode node)
    {
        IParameterNode[] pnodes = node.getParameterNodes();
        if (pnodes.length == 0)
            return;

        Map<Integer, IParameterNode> defaults = getDefaults(pnodes);

        if (defaults != null)
        {
            final StringBuilder code = new StringBuilder();

            if (!hasBody(node))
            {
                indentPush();
                write(ASEmitterTokens.INDENT);
            }

            List<IParameterNode> parameters = new ArrayList<IParameterNode>(
                    defaults.values());

            for (int i = 0, n = parameters.size(); i < n; i++)
            {
                IParameterNode pnode = parameters.get(i);

                if (pnode != null)
                {
                    code.setLength(0);

                    /* x = typeof y !== 'undefined' ? y : z;\n */
                    code.append(pnode.getName());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(ASEmitterTokens.EQUAL.getToken());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(ASEmitterTokens.TYPEOF.getToken());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(pnode.getName());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(ASEmitterTokens.STRICT_NOT_EQUAL.getToken());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(ASEmitterTokens.SINGLE_QUOTE.getToken());
                    code.append(ASEmitterTokens.UNDEFINED.getToken());
                    code.append(ASEmitterTokens.SINGLE_QUOTE.getToken());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(ASEmitterTokens.TERNARY.getToken());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(pnode.getName());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(ASEmitterTokens.COLON.getToken());
                    code.append(ASEmitterTokens.SPACE.getToken());
                    code.append(pnode.getDefaultValue());
                    code.append(ASEmitterTokens.SEMICOLON.getToken());

                    write(code.toString());

                    if (i == n - 1 && !hasBody(node))
                        indentPop();

                    writeNewline();
                }
            }
        }
    }

    /**
     * If the function declares a rest parameter, emits the prologue line
     * {@code rest = Array.prototype.slice.call(arguments, <n>);} where n is
     * the count of preceding parameters.
     */
    private void emitRestParameterCodeBlock(IFunctionNode node)
    {
        IParameterNode[] pnodes = node.getParameterNodes();

        IParameterNode rest = getRest(pnodes);
        if (rest != null)
        {
            final StringBuilder code = new StringBuilder();

            /* x = Array.prototype.slice.call(arguments, y);\n */
            code.append(rest.getName());
            code.append(ASEmitterTokens.SPACE.getToken());
            code.append(ASEmitterTokens.EQUAL.getToken());
            code.append(ASEmitterTokens.SPACE.getToken());
            code.append(BuiltinType.ARRAY.getName());
            code.append(ASEmitterTokens.MEMBER_ACCESS.getToken());
            code.append(JSEmitterTokens.PROTOTYPE.getToken());
            code.append(ASEmitterTokens.MEMBER_ACCESS.getToken());
            code.append(JSEmitterTokens.SLICE.getToken());
            code.append(ASEmitterTokens.MEMBER_ACCESS.getToken());
            code.append(JSEmitterTokens.CALL.getToken());
            code.append(ASEmitterTokens.PAREN_OPEN.getToken());
            code.append(JSEmitterTokens.ARGUMENTS.getToken());
            code.append(ASEmitterTokens.COMMA.getToken());
            code.append(ASEmitterTokens.SPACE.getToken());
            code.append(String.valueOf(pnodes.length - 1));
            code.append(ASEmitterTokens.PAREN_CLOSE.getToken());
            code.append(ASEmitterTokens.SEMICOLON.getToken());

            write(code.toString());

            writeNewline();
        }
    }

    /** Emits only the parameter name; JS has no parameter types or defaults. */
    @Override
    public void emitParameter(IParameterNode node)
    {
        getWalker().walk(node.getNameExpressionNode());
    }

    /**
     * Emits {@code = value}. Class references are written as their
     * qualified name rather than walked.
     */
    @Override
    protected void emitAssignedValue(IExpressionNode node)
    {
        if (node != null)
        {
            write(ASEmitterTokens.SPACE);
            writeToken(ASEmitterTokens.EQUAL);
            if (node.getNodeID() == ASTNodeID.ClassReferenceID)
            {
                IDefinition definition = node.resolve(getWalker().getProject());
                write(definition.getQualifiedName());
            }
            else
            {
                getWalker().walk(node);
            }
        }
    }

    /** The Vector type annotation is dropped; only the collection is emitted. */
    @Override
    public void emitTypedExpression(ITypedExpressionNode node)
    {
        getWalker().walk(node.getCollectionNode());
    }

    /**
     * Rewrites {@code for each (var x in collection) ...} as
     * {@code goog.array.forEach(collection, function(x) { ... })}.
     */
    @Override
    public void emitForEachLoop(IForLoopNode node)
    {
        IContainerNode xnode = (IContainerNode) node.getChild(1);
        IBinaryOperatorNode bnode = (IBinaryOperatorNode) node
                .getConditionalsContainerNode().getChild(0);
        IASNode childNode = bnode.getChild(0);

        write(JSGoogEmitterTokens.GOOG_ARRAY_FOREACH);
        write(ASEmitterTokens.PAREN_OPEN);
        getWalker().walk(bnode.getChild(1));
        writeToken(ASEmitterTokens.COMMA);
        writeToken(ASEmitterTokens.FUNCTION);
        write(ASEmitterTokens.PAREN_OPEN);
        if (childNode instanceof IVariableExpressionNode)
            write(((IVariableNode) childNode.getChild(0)).getName());
        else
            write(((IIdentifierNode) childNode).getName());
        writeToken(ASEmitterTokens.PAREN_CLOSE);
        if (isImplicit(xnode))
            write(ASEmitterTokens.BLOCK_OPEN);
        getWalker().walk(node.getStatementContentsNode());
        if (isImplicit(xnode))
        {
            writeNewline();
            write(ASEmitterTokens.BLOCK_CLOSE);
        }
        write(ASEmitterTokens.PAREN_CLOSE);
    }

    /**
     * @param out the writer that receives all emitted JavaScript.
     */
    public JSGoogEmitter(FilterWriter out)
    {
        super(out);
    }

    /**
     * Maps each parameter position to its node when it has a default value,
     * or to null otherwise.
     *
     * @return the position map, or null when no parameter has a default.
     */
    protected Map<Integer, IParameterNode> getDefaults(IParameterNode[] nodes)
    {
        Map<Integer, IParameterNode> result = new HashMap<Integer, IParameterNode>();
        int i = 0;
        boolean hasDefaults = false;
        for (IParameterNode node : nodes)
        {
            if (node.hasDefaultValue())
            {
                hasDefaults = true;
                result.put(i, node);
            }
            else
            {
                result.put(i, null);
            }
            i++;
        }

        if (!hasDefaults)
            return null;

        return result;
    }

    /** Returns the first rest parameter (`...args`), or null if none. */
    private IParameterNode getRest(IParameterNode[] nodes)
    {
        for (IParameterNode node : nodes)
        {
            if (node.isRest())
                return node;
        }

        return null;
    }

    /** Returns the definition of the type (class/interface) enclosing the node. */
    private static ITypeDefinition getTypeDefinition(IDefinitionNode node)
    {
        ITypeNode tnode = (ITypeNode) node.getAncestorOfType(ITypeNode.class);
        return (ITypeDefinition) tnode.getDefinition();
    }

    /** Returns the enclosing class definition, or null when not inside a class. */
    protected static IClassDefinition getClassDefinition(IDefinitionNode node)
    {
        IClassNode tnode = (IClassNode) node
                .getAncestorOfType(IClassNode.class);
        return (tnode != null) ? tnode.getDefinition() : null;
    }

    /** Resolves the base class of the class that owns the given member node. */
    private static IClassDefinition getSuperClassDefinition(
            IDefinitionNode node, ICompilerProject project)
    {
        IClassDefinition parent = (IClassDefinition) node.getDefinition()
                .getParent();
        IClassDefinition superClass = parent.resolveBaseClass(project);
        return superClass;
    }

    /**
     * True when the enclosing class has a superclass other than Object.
     */
    protected boolean hasSuperClass(IDefinitionNode node)
    {
        ICompilerProject project = getWalker().getProject();
        IClassDefinition superClassDefinition = getSuperClassDefinition(node,
                project);
        // XXX (mschmalle) this is nulling for MXML super class, figure out why
        if (superClassDefinition == null)
            return false;
        String qname = superClassDefinition.getQualifiedName();
        return superClassDefinition != null
                && !qname.equals(IASLanguageConstants.Object);
    }

    /** True when the scope contains a direct super(...) call statement. */
    private boolean hasSuperCall(IScopedNode node)
    {
        for (int i = node.getChildCount() - 1; i > -1; i--)
        {
            IASNode cnode = node.getChild(i);
            if (cnode.getNodeID() == ASTNodeID.FunctionCallID
                    && cnode.getChild(0).getNodeID() == ASTNodeID.SuperID)
                return true;
        }

        return false;
    }

    /** True when the function body contains at least one statement. */
    protected static boolean hasBody(IFunctionNode node)
    {
        IScopedNode scope = node.getScopedNode();
        return scope.getChildCount() > 0;
    }

    /**
     * Emits an accessor as an {@code Object.defineProperty} call:
     *
     * <pre>
     * Object.defineProperty(
     *     A.prototype,
     *     'foo',
     *     {get: function() {return -1;},
     *     configurable: true}
     * );
     * </pre>
     */
    protected void emitObjectDefineProperty(IAccessorNode node)
    {
        /*
        Object.defineProperty(
            A.prototype, 
            'foo', 
            {get: function() {return -1;}, 
            configurable: true}
         );
        */
        FunctionNode fn = (FunctionNode) node;
        fn.parseFunctionBody(getProblems());

        // head
        write(JSGoogEmitterTokens.OBJECT);
        write(ASEmitterTokens.MEMBER_ACCESS);
        write(JSEmitterTokens.DEFINE_PROPERTY);
        writeNewline(ASEmitterTokens.PAREN_OPEN, true);

        // Type
        IFunctionDefinition definition = node.getDefinition();
        ITypeDefinition type = (ITypeDefinition) definition.getParent();
        write(type.getQualifiedName());
        if (!node.hasModifier(ASModifier.STATIC))
        {
            write(ASEmitterTokens.MEMBER_ACCESS);
            write(JSEmitterTokens.PROTOTYPE);
        }

        writeToken(ASEmitterTokens.COMMA);
        writeNewline();

        // name
        write(ASEmitterTokens.SINGLE_QUOTE);
        write(definition.getBaseName());
        write(ASEmitterTokens.SINGLE_QUOTE);
        writeToken(ASEmitterTokens.COMMA);
        writeNewline();

        // info object
        // declaration
        write(ASEmitterTokens.BLOCK_OPEN);
        write(node.getNodeID() == ASTNodeID.GetterID ? ASEmitterTokens.GET
                : ASEmitterTokens.SET);
        write(ASEmitterTokens.COLON);
        write(ASEmitterTokens.FUNCTION);
        emitParameters(node.getParameterNodes());

        emitMethodScope(node.getScopedNode());

        writeToken(ASEmitterTokens.COMMA);
        write(JSEmitterTokens.CONFIGURABLE);
        write(ASEmitterTokens.COLON);
        write(ASEmitterTokens.TRUE);
        writeNewline(ASEmitterTokens.BLOCK_CLOSE, false);

        // tail, no colon; parent container will add it
        write(ASEmitterTokens.PAREN_CLOSE);
    }

    //--------------------------------------------------------------------------
    // Operators
    //--------------------------------------------------------------------------

    /** Namespace access (ns::name) is flattened to a plain member access. */
    @Override
    public void emitNamespaceAccessExpression(INamespaceAccessExpressionNode node)
    {
        getWalker().walk(node.getLeftOperandNode());
        write(ASEmitterTokens.MEMBER_ACCESS);
        getWalker().walk(node.getRightOperandNode());
    }

    /** 'as' is handled by the Op_AsID branch of emitBinaryOperator(). */
    @Override
    public void emitAsOperator(IBinaryOperatorNode node)
    {
        emitBinaryOperator(node);
    }

    /** 'is' is handled by the Op_IsID branch of emitBinaryOperator(). */
    @Override
    public void emitIsOperator(IBinaryOperatorNode node)
    {
        emitBinaryOperator(node);
    }

    /**
     * Emits a binary expression. Special cases:
     * <ul>
     * <li>{@code a is B} becomes {@code is(a, B)}</li>
     * <li>{@code a as B} becomes {@code (is(a, B) ? a : null)}
     *     (note: re-walks the left operand, so its side effects — if any —
     *     would occur twice)</li>
     * <li>{@code a &&= b} / {@code a ||= b} become
     *     {@code a = a && b} / {@code a = a || b}</li>
     * </ul>
     */
    @Override
    public void emitBinaryOperator(IBinaryOperatorNode node)
    {
        if (ASNodeUtils.hasParenOpen(node))
            write(ASEmitterTokens.PAREN_OPEN);

        ASTNodeID id = node.getNodeID();

        if (id == ASTNodeID.Op_IsID)
        {
            write(ASEmitterTokens.IS);
            write(ASEmitterTokens.PAREN_OPEN);
            getWalker().walk(node.getLeftOperandNode());
            writeToken(ASEmitterTokens.COMMA);
            getWalker().walk(node.getRightOperandNode());
            write(ASEmitterTokens.PAREN_CLOSE);
        }
        else if (id == ASTNodeID.Op_AsID)
        {
            // (is(a, b) ? a : null)
            write(ASEmitterTokens.PAREN_OPEN);
            write(ASEmitterTokens.IS);
            write(ASEmitterTokens.PAREN_OPEN);
            getWalker().walk(node.getLeftOperandNode());
            writeToken(ASEmitterTokens.COMMA);
            getWalker().walk(node.getRightOperandNode());
            writeToken(ASEmitterTokens.PAREN_CLOSE);
            writeToken(ASEmitterTokens.TERNARY);
            getWalker().walk(node.getLeftOperandNode());
            write(ASEmitterTokens.SPACE);
            writeToken(ASEmitterTokens.COLON);
            write(ASEmitterTokens.NULL);
            write(ASEmitterTokens.PAREN_CLOSE);
        }
        else
        {
            getWalker().walk(node.getLeftOperandNode());

            if (id != ASTNodeID.Op_CommaID)
                write(ASEmitterTokens.SPACE);

            // (erikdebruin) rewrite 'a &&= b' to 'a = a && b'
            if (id == ASTNodeID.Op_LogicalAndAssignID
                    || id == ASTNodeID.Op_LogicalOrAssignID)
            {
                IIdentifierNode lnode = (IIdentifierNode) node
                        .getLeftOperandNode();

                writeToken(ASEmitterTokens.EQUAL);
                writeToken(lnode.getName());
                write((id == ASTNodeID.Op_LogicalAndAssignID) ? ASEmitterTokens.LOGICAL_AND
                        : ASEmitterTokens.LOGICAL_OR);
            }
            else
            {
                write(node.getOperator().getOperatorText());
            }

            write(ASEmitterTokens.SPACE);

            getWalker().walk(node.getRightOperandNode());
        }

        if (ASNodeUtils.hasParenOpen(node))
            write(ASEmitterTokens.PAREN_CLOSE);
    }

    //--------------------------------------------------------------------------
    //
    //--------------------------------------------------------------------------

    /**
     * Collects the import names needed by the type: from its scope node
     * when available, otherwise (MXML) from the class's implicit imports.
     */
    private List<String> resolveImports(ITypeDefinition type)
    {
        ArrayList<String> list = new ArrayList<String>();
        IScopedNode scopeNode = type.getContainedScope().getScopeNode();
        if (scopeNode != null)
        {
            scopeNode.getAllImports(list);
        }
        else
        {
            // MXML
            ClassDefinition cdefinition = (ClassDefinition) type;
            String[] implicitImports = cdefinition.getImplicitImports();
            for (String imp : implicitImports)
            {
                list.add(imp);
            }
        }

        return list;
    }
}
compiler.jx/src/org/apache/flex/compiler/internal/codegen/js/goog/JSGoogEmitter.java
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.flex.compiler.internal.codegen.js.goog; import java.io.FilterWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.flex.compiler.codegen.IASGlobalFunctionConstants.BuiltinType; import org.apache.flex.compiler.codegen.IDocEmitter; import org.apache.flex.compiler.codegen.js.goog.IJSGoogDocEmitter; import org.apache.flex.compiler.codegen.js.goog.IJSGoogEmitter; import org.apache.flex.compiler.common.ASModifier; import org.apache.flex.compiler.common.ModifiersSet; import org.apache.flex.compiler.constants.IASLanguageConstants; import org.apache.flex.compiler.definitions.IClassDefinition; import org.apache.flex.compiler.definitions.IDefinition; import org.apache.flex.compiler.definitions.IFunctionDefinition; import org.apache.flex.compiler.definitions.IPackageDefinition; import org.apache.flex.compiler.definitions.ITypeDefinition; import org.apache.flex.compiler.internal.codegen.as.ASEmitterTokens; import org.apache.flex.compiler.internal.codegen.js.JSEmitter; import org.apache.flex.compiler.internal.codegen.js.JSEmitterTokens; import org.apache.flex.compiler.internal.definitions.ClassDefinition; import 
org.apache.flex.compiler.internal.scopes.PackageScope; import org.apache.flex.compiler.internal.tree.as.ChainedVariableNode; import org.apache.flex.compiler.internal.tree.as.FunctionCallNode; import org.apache.flex.compiler.internal.tree.as.FunctionNode; import org.apache.flex.compiler.problems.ICompilerProblem; import org.apache.flex.compiler.projects.ICompilerProject; import org.apache.flex.compiler.scopes.IASScope; import org.apache.flex.compiler.tree.ASTNodeID; import org.apache.flex.compiler.tree.as.IASNode; import org.apache.flex.compiler.tree.as.IAccessorNode; import org.apache.flex.compiler.tree.as.IBinaryOperatorNode; import org.apache.flex.compiler.tree.as.IClassNode; import org.apache.flex.compiler.tree.as.IContainerNode; import org.apache.flex.compiler.tree.as.IDefinitionNode; import org.apache.flex.compiler.tree.as.IExpressionNode; import org.apache.flex.compiler.tree.as.IForLoopNode; import org.apache.flex.compiler.tree.as.IFunctionCallNode; import org.apache.flex.compiler.tree.as.IFunctionNode; import org.apache.flex.compiler.tree.as.IGetterNode; import org.apache.flex.compiler.tree.as.IIdentifierNode; import org.apache.flex.compiler.tree.as.IInterfaceNode; import org.apache.flex.compiler.tree.as.INamespaceAccessExpressionNode; import org.apache.flex.compiler.tree.as.IParameterNode; import org.apache.flex.compiler.tree.as.IScopedNode; import org.apache.flex.compiler.tree.as.ISetterNode; import org.apache.flex.compiler.tree.as.ITypeNode; import org.apache.flex.compiler.tree.as.ITypedExpressionNode; import org.apache.flex.compiler.tree.as.IVariableExpressionNode; import org.apache.flex.compiler.tree.as.IVariableNode; import org.apache.flex.compiler.utils.ASNodeUtils; /** * Concrete implementation of the 'goog' JavaScript production. 
* * @author Michael Schmalle * @author Erik de Bruin */ public class JSGoogEmitter extends JSEmitter implements IJSGoogEmitter { protected static final String CONSTRUCTOR_EMPTY = "emptyConstructor"; protected static final String CONSTRUCTOR_FULL = "fullConstructor"; protected static final String SUPER_FUNCTION_CALL = "replaceSuperFunction"; protected List<String> propertyNames = new ArrayList<String>(); protected ICompilerProject project; protected IJSGoogDocEmitter getDoc() { return (IJSGoogDocEmitter) getDocEmitter(); } @Override public IDocEmitter getDocEmitter() { return new JSGoogDocEmitter(this); } //-------------------------------------------------------------------------- // //-------------------------------------------------------------------------- @Override public void emitPackageHeader(IPackageDefinition definition) { IASScope containedScope = definition.getContainedScope(); ITypeDefinition type = findType(containedScope.getAllLocalDefinitions()); if (type == null) return; /* goog.provide('x');\n\n */ write(JSGoogEmitterTokens.GOOG_PROVIDE); write(ASEmitterTokens.PAREN_OPEN); write(ASEmitterTokens.SINGLE_QUOTE); write(type.getQualifiedName()); write(ASEmitterTokens.SINGLE_QUOTE); write(ASEmitterTokens.PAREN_CLOSE); writeNewline(ASEmitterTokens.SEMICOLON); writeNewline(); } @Override public void emitPackageHeaderContents(IPackageDefinition definition) { PackageScope containedScope = (PackageScope) definition .getContainedScope(); ITypeDefinition type = findType(containedScope.getAllLocalDefinitions()); if (type == null) return; List<String> list = resolveImports(type); for (String imp : list) { if (imp.indexOf(JSGoogEmitterTokens.AS3.getToken()) != -1) continue; /* goog.require('x');\n */ write(JSGoogEmitterTokens.GOOG_REQUIRE); write(ASEmitterTokens.PAREN_OPEN); write(ASEmitterTokens.SINGLE_QUOTE); write(imp); write(ASEmitterTokens.SINGLE_QUOTE); write(ASEmitterTokens.PAREN_CLOSE); writeNewline(ASEmitterTokens.SEMICOLON); } // (erikdebruin) only write 
'closing' line break when there are // actually imports... if (list.size() > 1 || (list.size() == 1 && list.get(0).indexOf( JSGoogEmitterTokens.AS3.getToken()) == -1)) { writeNewline(); } } @Override public void emitPackageContents(IPackageDefinition definition) { IASScope containedScope = definition.getContainedScope(); ITypeDefinition type = findType(containedScope.getAllLocalDefinitions()); if (type == null) return; ITypeNode tnode = findTypeNode(definition.getNode()); if (tnode != null) { getWalker().walk(tnode); // IClassNode | IInterfaceNode } } @Override public void emitPackageFooter(IPackageDefinition definition) { } //-------------------------------------------------------------------------- // //-------------------------------------------------------------------------- @Override public void emitClass(IClassNode node) { IClassDefinition definition = node.getDefinition(); IFunctionDefinition ctorDefinition = definition.getConstructor(); // Static-only (Singleton) classes may not have a constructor if (ctorDefinition != null) { IFunctionNode ctorNode = (IFunctionNode) ctorDefinition.getNode(); if (ctorNode != null) { // constructor emitMethod(ctorNode); write(ASEmitterTokens.SEMICOLON); } else { String qname = definition.getQualifiedName(); if (qname != null && !qname.equals("")) { write(qname); write(ASEmitterTokens.SPACE); writeToken(ASEmitterTokens.EQUAL); write(ASEmitterTokens.FUNCTION); write(ASEmitterTokens.PAREN_OPEN); write(ASEmitterTokens.PAREN_CLOSE); write(ASEmitterTokens.SPACE); write(ASEmitterTokens.BLOCK_OPEN); writeNewline(); write(ASEmitterTokens.BLOCK_CLOSE); write(ASEmitterTokens.SEMICOLON); } } } IDefinitionNode[] dnodes = node.getAllMemberNodes(); for (IDefinitionNode dnode : dnodes) { if (dnode.getNodeID() == ASTNodeID.VariableID) { writeNewline(); writeNewline(); emitField((IVariableNode) dnode); write(ASEmitterTokens.SEMICOLON); } else if (dnode.getNodeID() == ASTNodeID.FunctionID) { if (!((IFunctionNode) dnode).isConstructor()) { 
writeNewline(); writeNewline(); emitMethod((IFunctionNode) dnode); write(ASEmitterTokens.SEMICOLON); } } else if (dnode.getNodeID() == ASTNodeID.GetterID || dnode.getNodeID() == ASTNodeID.SetterID) { writeNewline(); writeNewline(); emitAccessors((IAccessorNode) dnode); write(ASEmitterTokens.SEMICOLON); } } } @Override public void emitInterface(IInterfaceNode node) { ICompilerProject project = getWalker().getProject(); getDoc().emitInterfaceDoc(node, project); String qname = node.getQualifiedName(); if (qname != null && !qname.equals("")) { write(qname); write(ASEmitterTokens.SPACE); writeToken(ASEmitterTokens.EQUAL); write(ASEmitterTokens.FUNCTION); write(ASEmitterTokens.PAREN_OPEN); write(ASEmitterTokens.PAREN_CLOSE); write(ASEmitterTokens.SPACE); write(ASEmitterTokens.BLOCK_OPEN); writeNewline(); write(ASEmitterTokens.BLOCK_CLOSE); write(ASEmitterTokens.SEMICOLON); } final IDefinitionNode[] members = node.getAllMemberDefinitionNodes(); for (IDefinitionNode mnode : members) { boolean isAccessor = mnode.getNodeID() == ASTNodeID.GetterID || mnode.getNodeID() == ASTNodeID.SetterID; if (!isAccessor || !propertyNames.contains(qname)) { writeNewline(); write(qname); write(ASEmitterTokens.MEMBER_ACCESS); write(JSEmitterTokens.PROTOTYPE); write(ASEmitterTokens.MEMBER_ACCESS); write(mnode.getQualifiedName()); if (isAccessor && !propertyNames.contains(qname)) { propertyNames.add(qname); } else { write(ASEmitterTokens.SPACE); writeToken(ASEmitterTokens.EQUAL); write(ASEmitterTokens.FUNCTION); emitParameters(((IFunctionNode) mnode).getParameterNodes()); write(ASEmitterTokens.SPACE); write(ASEmitterTokens.BLOCK_OPEN); writeNewline(); write(ASEmitterTokens.BLOCK_CLOSE); } write(ASEmitterTokens.SEMICOLON); } } } @Override public void emitField(IVariableNode node) { IClassDefinition definition = getClassDefinition(node); IDefinition def = null; IExpressionNode enode = node.getVariableTypeNode();//getAssignedValueNode(); if (enode != null) def = 
enode.resolveType(getWalker().getProject()); getDoc().emitFieldDoc(node, def); /* x.prototype.y = z */ ModifiersSet modifierSet = node.getDefinition().getModifiers(); String root = ""; if (modifierSet != null && !modifierSet.hasModifier(ASModifier.STATIC)) { root = JSEmitterTokens.PROTOTYPE.getToken(); root += ASEmitterTokens.MEMBER_ACCESS.getToken(); } write(definition.getQualifiedName() + ASEmitterTokens.MEMBER_ACCESS.getToken() + root + node.getName()); IExpressionNode vnode = node.getAssignedValueNode(); if (vnode != null) { write(ASEmitterTokens.SPACE); writeToken(ASEmitterTokens.EQUAL); getWalker().walk(vnode); } if (!(node instanceof ChainedVariableNode)) { int len = node.getChildCount(); for (int i = 0; i < len; i++) { IASNode child = node.getChild(i); if (child instanceof ChainedVariableNode) { writeNewline(ASEmitterTokens.SEMICOLON); writeNewline(); emitField((IVariableNode) child); } } } } @Override public void emitVarDeclaration(IVariableNode node) { if (!(node instanceof ChainedVariableNode) && !node.isConst()) { emitMemberKeyword(node); } IExpressionNode avnode = node.getAssignedValueNode(); if (avnode != null) { IDefinition def = avnode.resolveType(getWalker().getProject()); String opcode = avnode.getNodeID().getParaphrase(); if (opcode != "AnonymousFunction") getDoc().emitVarDoc(node, def); } else { getDoc().emitVarDoc(node, null); } emitDeclarationName(node); emitAssignedValue(avnode); if (!(node instanceof ChainedVariableNode)) { // check for chained variables int len = node.getChildCount(); for (int i = 0; i < len; i++) { IASNode child = node.getChild(i); if (child instanceof ChainedVariableNode) { writeToken(ASEmitterTokens.COMMA); emitVarDeclaration((IVariableNode) child); } } } } @Override public void emitGetAccessor(IGetterNode node) { emitObjectDefineProperty(node); } @Override public void emitSetAccessor(ISetterNode node) { emitObjectDefineProperty(node); } protected void emitAccessors(IAccessorNode node) { String qname = 
node.getQualifiedName(); if (!propertyNames.contains(qname)) { emitField(node); write(ASEmitterTokens.SEMICOLON); writeNewline(); writeNewline(); propertyNames.add(qname); } if (node.getNodeID() == ASTNodeID.GetterID) { emitGetAccessor((IGetterNode) node); } else if (node.getNodeID() == ASTNodeID.SetterID) { emitSetAccessor((ISetterNode) node); } } @Override public void emitMethod(IFunctionNode node) { FunctionNode fn = (FunctionNode) node; fn.parseFunctionBody(new ArrayList<ICompilerProblem>()); ICompilerProject project = getWalker().getProject(); getDoc().emitMethodDoc(node, project); boolean isConstructor = node.isConstructor(); String qname = getTypeDefinition(node).getQualifiedName(); if (qname != null && !qname.equals("")) { write(qname); if (!isConstructor) { write(ASEmitterTokens.MEMBER_ACCESS); if (!fn.hasModifier(ASModifier.STATIC)) { write(JSEmitterTokens.PROTOTYPE); write(ASEmitterTokens.MEMBER_ACCESS); } } } if (!isConstructor) emitMemberName(node); write(ASEmitterTokens.SPACE); writeToken(ASEmitterTokens.EQUAL); write(ASEmitterTokens.FUNCTION); emitParameters(node.getParameterNodes()); boolean hasSuperClass = hasSuperClass(node); if (isConstructor && node.getScopedNode().getChildCount() == 0) { write(ASEmitterTokens.SPACE); write(ASEmitterTokens.BLOCK_OPEN); if (hasSuperClass) emitSuperCall(node, CONSTRUCTOR_EMPTY); writeNewline(); write(ASEmitterTokens.BLOCK_CLOSE); } if (!isConstructor || node.getScopedNode().getChildCount() > 0) emitMethodScope(node.getScopedNode()); if (isConstructor && hasSuperClass) { writeNewline(); write(JSGoogEmitterTokens.GOOG_INHERITS); write(ASEmitterTokens.PAREN_OPEN); write(qname); writeToken(ASEmitterTokens.COMMA); String sname = getSuperClassDefinition(node, project) .getQualifiedName(); write(sname); write(ASEmitterTokens.PAREN_CLOSE); } } @Override public void emitFunctionCall(IFunctionCallNode node) { IASNode cnode = node.getChild(0); if (cnode.getNodeID() == ASTNodeID.MemberAccessExpressionID) cnode = 
cnode.getChild(0); ASTNodeID id = cnode.getNodeID(); if (id != ASTNodeID.SuperID) { if (node.isNewExpression()) { writeToken(ASEmitterTokens.NEW); } getWalker().walk(node.getNameNode()); write(ASEmitterTokens.PAREN_OPEN); walkArguments(node.getArgumentNodes()); write(ASEmitterTokens.PAREN_CLOSE); } else { emitSuperCall(node, SUPER_FUNCTION_CALL); } } @Override public void emitIdentifier(IIdentifierNode node) { ICompilerProject project = getWalker().getProject(); IClassNode cnode = (IClassNode) node .getAncestorOfType(IClassNode.class); IDefinition def = ((IIdentifierNode) node).resolve(project); ITypeDefinition type = ((IIdentifierNode) node).resolveType(project); IASNode pnode = node.getParent(); ASTNodeID inode = pnode.getNodeID(); boolean writeSelf = false; if (cnode != null) { IDefinitionNode[] members = cnode.getAllMemberNodes(); for (IDefinitionNode mnode : members) { if ((type != null && type.getQualifiedName().equalsIgnoreCase( IASLanguageConstants.Function)) || (def != null && def.getQualifiedName() .equalsIgnoreCase(mnode.getQualifiedName()))) { if (!(pnode instanceof FunctionNode) && inode != ASTNodeID.MemberAccessExpressionID) { writeSelf = true; break; } else if (inode == ASTNodeID.MemberAccessExpressionID && !def.isStatic()) { String tname = type.getQualifiedName(); writeSelf = !tname.equalsIgnoreCase(cnode .getQualifiedName()) && !tname.equals(IASLanguageConstants.Function); break; } } } } boolean isRunningInTestMode = cnode != null && cnode.getQualifiedName().equalsIgnoreCase("FalconTest_A"); if (writeSelf && !isRunningInTestMode) { write(JSGoogEmitterTokens.SELF); write(ASEmitterTokens.MEMBER_ACCESS); } else { String pname = (type != null) ? 
type.getPackageName() : ""; if (cnode != null && pname != "" && !pname.equalsIgnoreCase(cnode.getPackageName()) && inode != ASTNodeID.ArgumentID && inode != ASTNodeID.VariableID && inode != ASTNodeID.TypedExpressionID) { write(pname); write(ASEmitterTokens.MEMBER_ACCESS); } } super.emitIdentifier(node); } @Override public void emitFunctionBlockHeader(IFunctionNode node) { IDefinition def = node.getDefinition(); boolean isStatic = false; if (def != null && def.isStatic()) isStatic = true; boolean isLocal = false; if (node.getFunctionClassification() == IFunctionDefinition.FunctionClassification.LOCAL) isLocal = true; if (hasBody(node) && !isStatic && !isLocal) emitSelfReference(node); if (node.isConstructor() && hasSuperClass(node) && !hasSuperCall(node.getScopedNode())) emitSuperCall(node, CONSTRUCTOR_FULL); emitRestParameterCodeBlock(node); emitDefaultParameterCodeBlock(node); } protected void emitSelfReference(IFunctionNode node) { writeToken(ASEmitterTokens.VAR); writeToken(JSGoogEmitterTokens.SELF); writeToken(ASEmitterTokens.EQUAL); write(ASEmitterTokens.THIS); writeNewline(ASEmitterTokens.SEMICOLON); } protected void emitSuperCall(IASNode node, String type) { IFunctionNode fnode = (node instanceof IFunctionNode) ? (IFunctionNode) node : null; IFunctionCallNode fcnode = (node instanceof IFunctionCallNode) ? 
(FunctionCallNode) node : null; if (type == CONSTRUCTOR_EMPTY) { indentPush(); writeNewline(); indentPop(); } else if (type == SUPER_FUNCTION_CALL) { if (fnode == null) fnode = (IFunctionNode) fcnode .getAncestorOfType(IFunctionNode.class); } if (fnode.isConstructor() && !hasSuperClass(fnode)) return; IClassNode cnode = (IClassNode) node .getAncestorOfType(IClassNode.class); write(cnode.getQualifiedName()); write(ASEmitterTokens.MEMBER_ACCESS); write(JSGoogEmitterTokens.GOOG_BASE); write(ASEmitterTokens.PAREN_OPEN); write(ASEmitterTokens.THIS); if (fnode.isConstructor()) { writeToken(ASEmitterTokens.COMMA); write(ASEmitterTokens.SINGLE_QUOTE); write(JSGoogEmitterTokens.GOOG_CONSTRUCTOR); write(ASEmitterTokens.SINGLE_QUOTE); } if (fnode != null && !fnode.isConstructor()) { writeToken(ASEmitterTokens.COMMA); write(ASEmitterTokens.SINGLE_QUOTE); write(fnode.getName()); write(ASEmitterTokens.SINGLE_QUOTE); } IASNode[] anodes = null; boolean writeArguments = false; if (fcnode != null) { anodes = fcnode.getArgumentNodes(); writeArguments = anodes.length > 0; } else if (fnode.isConstructor()) { anodes = fnode.getParameterNodes(); writeArguments = (anodes != null && anodes.length > 0); } if (writeArguments) { int len = anodes.length; for (int i = 0; i < len; i++) { writeToken(ASEmitterTokens.COMMA); getWalker().walk(anodes[i]); } } write(ASEmitterTokens.PAREN_CLOSE); if (type == CONSTRUCTOR_FULL) { write(ASEmitterTokens.SEMICOLON); writeNewline(); } else if (type == CONSTRUCTOR_EMPTY) { write(ASEmitterTokens.SEMICOLON); } } protected void emitDefaultParameterCodeBlock(IFunctionNode node) { IParameterNode[] pnodes = node.getParameterNodes(); if (pnodes.length == 0) return; Map<Integer, IParameterNode> defaults = getDefaults(pnodes); if (defaults != null) { final StringBuilder code = new StringBuilder(); if (!hasBody(node)) { indentPush(); write(ASEmitterTokens.INDENT); } List<IParameterNode> parameters = new ArrayList<IParameterNode>( defaults.values()); for (int i = 0, n = 
parameters.size(); i < n; i++) { IParameterNode pnode = parameters.get(i); if (pnode != null) { code.setLength(0); /* x = typeof y !== 'undefined' ? y : z;\n */ code.append(pnode.getName()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(ASEmitterTokens.EQUAL.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(ASEmitterTokens.TYPEOF.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(pnode.getName()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(ASEmitterTokens.STRICT_NOT_EQUAL.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(ASEmitterTokens.SINGLE_QUOTE.getToken()); code.append(ASEmitterTokens.UNDEFINED.getToken()); code.append(ASEmitterTokens.SINGLE_QUOTE.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(ASEmitterTokens.TERNARY.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(pnode.getName()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(ASEmitterTokens.COLON.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(pnode.getDefaultValue()); code.append(ASEmitterTokens.SEMICOLON.getToken()); write(code.toString()); if (i == n - 1 && !hasBody(node)) indentPop(); writeNewline(); } } } } private void emitRestParameterCodeBlock(IFunctionNode node) { IParameterNode[] pnodes = node.getParameterNodes(); IParameterNode rest = getRest(pnodes); if (rest != null) { final StringBuilder code = new StringBuilder(); /* x = Array.prototype.slice.call(arguments, y);\n */ code.append(rest.getName()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(ASEmitterTokens.EQUAL.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(BuiltinType.ARRAY.getName()); code.append(ASEmitterTokens.MEMBER_ACCESS.getToken()); code.append(JSEmitterTokens.PROTOTYPE.getToken()); code.append(ASEmitterTokens.MEMBER_ACCESS.getToken()); code.append(JSEmitterTokens.SLICE.getToken()); 
code.append(ASEmitterTokens.MEMBER_ACCESS.getToken()); code.append(JSEmitterTokens.CALL.getToken()); code.append(ASEmitterTokens.PAREN_OPEN.getToken()); code.append(JSEmitterTokens.ARGUMENTS.getToken()); code.append(ASEmitterTokens.COMMA.getToken()); code.append(ASEmitterTokens.SPACE.getToken()); code.append(String.valueOf(pnodes.length - 1)); code.append(ASEmitterTokens.PAREN_CLOSE.getToken()); code.append(ASEmitterTokens.SEMICOLON.getToken()); write(code.toString()); writeNewline(); } } @Override public void emitParameter(IParameterNode node) { getWalker().walk(node.getNameExpressionNode()); } @Override protected void emitAssignedValue(IExpressionNode node) { if (node != null) { write(ASEmitterTokens.SPACE); writeToken(ASEmitterTokens.EQUAL); if (node.getNodeID() == ASTNodeID.ClassReferenceID) { IDefinition definition = node.resolve(getWalker().getProject()); write(definition.getQualifiedName()); } else { getWalker().walk(node); } } } @Override public void emitTypedExpression(ITypedExpressionNode node) { getWalker().walk(node.getCollectionNode()); } @Override public void emitForEachLoop(IForLoopNode node) { IContainerNode xnode = (IContainerNode) node.getChild(1); IBinaryOperatorNode bnode = (IBinaryOperatorNode) node .getConditionalsContainerNode().getChild(0); IASNode childNode = bnode.getChild(0); write(JSGoogEmitterTokens.GOOG_ARRAY_FOREACH); write(ASEmitterTokens.PAREN_OPEN); getWalker().walk(bnode.getChild(1)); writeToken(ASEmitterTokens.COMMA); writeToken(ASEmitterTokens.FUNCTION); write(ASEmitterTokens.PAREN_OPEN); if (childNode instanceof IVariableExpressionNode) write(((IVariableNode) childNode.getChild(0)).getName()); else write(((IIdentifierNode) childNode).getName()); writeToken(ASEmitterTokens.PAREN_CLOSE); if (isImplicit(xnode)) write(ASEmitterTokens.BLOCK_OPEN); getWalker().walk(node.getStatementContentsNode()); if (isImplicit(xnode)) { writeNewline(); write(ASEmitterTokens.BLOCK_CLOSE); } write(ASEmitterTokens.PAREN_CLOSE); } public 
JSGoogEmitter(FilterWriter out) { super(out); } protected Map<Integer, IParameterNode> getDefaults(IParameterNode[] nodes) { Map<Integer, IParameterNode> result = new HashMap<Integer, IParameterNode>(); int i = 0; boolean hasDefaults = false; for (IParameterNode node : nodes) { if (node.hasDefaultValue()) { hasDefaults = true; result.put(i, node); } else { result.put(i, null); } i++; } if (!hasDefaults) return null; return result; } private IParameterNode getRest(IParameterNode[] nodes) { for (IParameterNode node : nodes) { if (node.isRest()) return node; } return null; } private static ITypeDefinition getTypeDefinition(IDefinitionNode node) { ITypeNode tnode = (ITypeNode) node.getAncestorOfType(ITypeNode.class); return (ITypeDefinition) tnode.getDefinition(); } protected static IClassDefinition getClassDefinition(IDefinitionNode node) { IClassNode tnode = (IClassNode) node .getAncestorOfType(IClassNode.class); return (tnode != null) ? tnode.getDefinition() : null; } private static IClassDefinition getSuperClassDefinition( IDefinitionNode node, ICompilerProject project) { IClassDefinition parent = (IClassDefinition) node.getDefinition() .getParent(); IClassDefinition superClass = parent.resolveBaseClass(project); return superClass; } protected boolean hasSuperClass(IDefinitionNode node) { ICompilerProject project = getWalker().getProject(); IClassDefinition superClassDefinition = getSuperClassDefinition(node, project); // XXX (mschmalle) this is nulling for MXML super class, figure out why if (superClassDefinition == null) return false; String qname = superClassDefinition.getQualifiedName(); return superClassDefinition != null && !qname.equals(IASLanguageConstants.Object); } private boolean hasSuperCall(IScopedNode node) { for (int i = node.getChildCount() - 1; i > -1; i--) { IASNode cnode = node.getChild(i); if (cnode.getNodeID() == ASTNodeID.FunctionCallID && cnode.getChild(0).getNodeID() == ASTNodeID.SuperID) return true; } return false; } protected static 
boolean hasBody(IFunctionNode node) { IScopedNode scope = node.getScopedNode(); return scope.getChildCount() > 0; } protected void emitObjectDefineProperty(IAccessorNode node) { /* Object.defineProperty( A.prototype, 'foo', {get: function() {return -1;}, configurable: true} ); */ FunctionNode fn = (FunctionNode) node; fn.parseFunctionBody(getProblems()); // head write(JSGoogEmitterTokens.OBJECT); write(ASEmitterTokens.MEMBER_ACCESS); write(JSEmitterTokens.DEFINE_PROPERTY); writeNewline(ASEmitterTokens.PAREN_OPEN, true); // Type IFunctionDefinition definition = node.getDefinition(); ITypeDefinition type = (ITypeDefinition) definition.getParent(); write(type.getQualifiedName()); if (!node.hasModifier(ASModifier.STATIC)) { write(ASEmitterTokens.MEMBER_ACCESS); write(JSEmitterTokens.PROTOTYPE); } writeToken(ASEmitterTokens.COMMA); writeNewline(); // name write(ASEmitterTokens.SINGLE_QUOTE); write(definition.getBaseName()); write(ASEmitterTokens.SINGLE_QUOTE); writeToken(ASEmitterTokens.COMMA); writeNewline(); // info object // declaration write(ASEmitterTokens.BLOCK_OPEN); write(node.getNodeID() == ASTNodeID.GetterID ? 
ASEmitterTokens.GET : ASEmitterTokens.SET); write(ASEmitterTokens.COLON); write(ASEmitterTokens.FUNCTION); emitParameters(node.getParameterNodes()); emitMethodScope(node.getScopedNode()); writeToken(ASEmitterTokens.COMMA); write(JSEmitterTokens.CONFIGURABLE); write(ASEmitterTokens.COLON); write(ASEmitterTokens.TRUE); writeNewline(ASEmitterTokens.BLOCK_CLOSE, false); // tail, no colon; parent container will add it write(ASEmitterTokens.PAREN_CLOSE); } //-------------------------------------------------------------------------- // Operators //-------------------------------------------------------------------------- @Override public void emitNamespaceAccessExpression(INamespaceAccessExpressionNode node) { getWalker().walk(node.getLeftOperandNode()); write(ASEmitterTokens.MEMBER_ACCESS); getWalker().walk(node.getRightOperandNode()); } @Override public void emitAsOperator(IBinaryOperatorNode node) { emitBinaryOperator(node); } @Override public void emitIsOperator(IBinaryOperatorNode node) { emitBinaryOperator(node); } @Override public void emitBinaryOperator(IBinaryOperatorNode node) { if (ASNodeUtils.hasParenOpen(node)) write(ASEmitterTokens.PAREN_OPEN); ASTNodeID id = node.getNodeID(); if (id == ASTNodeID.Op_IsID) { write(ASEmitterTokens.IS); write(ASEmitterTokens.PAREN_OPEN); getWalker().walk(node.getLeftOperandNode()); writeToken(ASEmitterTokens.COMMA); getWalker().walk(node.getRightOperandNode()); write(ASEmitterTokens.PAREN_CLOSE); } else if (id == ASTNodeID.Op_AsID) { // (is(a, b) ? 
a : null) write(ASEmitterTokens.PAREN_OPEN); write(ASEmitterTokens.IS); write(ASEmitterTokens.PAREN_OPEN); getWalker().walk(node.getLeftOperandNode()); writeToken(ASEmitterTokens.COMMA); getWalker().walk(node.getRightOperandNode()); writeToken(ASEmitterTokens.PAREN_CLOSE); writeToken(ASEmitterTokens.TERNARY); getWalker().walk(node.getLeftOperandNode()); write(ASEmitterTokens.SPACE); writeToken(ASEmitterTokens.COLON); write(ASEmitterTokens.NULL); write(ASEmitterTokens.PAREN_CLOSE); } else { getWalker().walk(node.getLeftOperandNode()); if (id != ASTNodeID.Op_CommaID) write(ASEmitterTokens.SPACE); // (erikdebruin) rewrite 'a &&= b' to 'a = a && b' if (id == ASTNodeID.Op_LogicalAndAssignID || id == ASTNodeID.Op_LogicalOrAssignID) { IIdentifierNode lnode = (IIdentifierNode) node .getLeftOperandNode(); writeToken(ASEmitterTokens.EQUAL); writeToken(lnode.getName()); write((id == ASTNodeID.Op_LogicalAndAssignID) ? ASEmitterTokens.LOGICAL_AND : ASEmitterTokens.LOGICAL_OR); } else { write(node.getOperator().getOperatorText()); } write(ASEmitterTokens.SPACE); getWalker().walk(node.getRightOperandNode()); } if (ASNodeUtils.hasParenOpen(node)) write(ASEmitterTokens.PAREN_CLOSE); } //-------------------------------------------------------------------------- // //-------------------------------------------------------------------------- private List<String> resolveImports(ITypeDefinition type) { ArrayList<String> list = new ArrayList<String>(); IScopedNode scopeNode = type.getContainedScope().getScopeNode(); if (scopeNode != null) { scopeNode.getAllImports(list); } else { // MXML ClassDefinition cdefinition = (ClassDefinition) type; String[] implicitImports = cdefinition.getImplicitImports(); for (String imp : implicitImports) { list.add(imp); } } return list; } }
Avoid an error when encountering an Embed node Signed-off-by: Erik de Bruin <[email protected]>
compiler.jx/src/org/apache/flex/compiler/internal/codegen/js/goog/JSGoogEmitter.java
Avoid an error when encountering an Embed node
Java
apache-2.0
bf462a791fbbd8852efc06e9eee3b21f5092fd4b
0
Servoy/wicket,mafulafunk/wicket,dashorst/wicket,mosoft521/wicket,AlienQueen/wicket,AlienQueen/wicket,topicusonderwijs/wicket,topicusonderwijs/wicket,topicusonderwijs/wicket,bitstorm/wicket,apache/wicket,dashorst/wicket,freiheit-com/wicket,mosoft521/wicket,aldaris/wicket,dashorst/wicket,klopfdreh/wicket,martin-g/wicket-osgi,AlienQueen/wicket,bitstorm/wicket,dashorst/wicket,freiheit-com/wicket,apache/wicket,aldaris/wicket,astrapi69/wicket,zwsong/wicket,AlienQueen/wicket,martin-g/wicket-osgi,bitstorm/wicket,mosoft521/wicket,aldaris/wicket,selckin/wicket,selckin/wicket,freiheit-com/wicket,apache/wicket,Servoy/wicket,klopfdreh/wicket,mosoft521/wicket,zwsong/wicket,klopfdreh/wicket,aldaris/wicket,bitstorm/wicket,Servoy/wicket,mosoft521/wicket,selckin/wicket,martin-g/wicket-osgi,topicusonderwijs/wicket,mafulafunk/wicket,freiheit-com/wicket,apache/wicket,selckin/wicket,Servoy/wicket,zwsong/wicket,apache/wicket,AlienQueen/wicket,Servoy/wicket,klopfdreh/wicket,astrapi69/wicket,astrapi69/wicket,topicusonderwijs/wicket,freiheit-com/wicket,klopfdreh/wicket,mafulafunk/wicket,zwsong/wicket,dashorst/wicket,aldaris/wicket,bitstorm/wicket,selckin/wicket,astrapi69/wicket
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.markup.repeater; import java.util.Iterator; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.wicket.Application; import org.apache.wicket.Component; import org.apache.wicket.markup.MarkupStream; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.model.IModel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Base class for repeaters. This container renders each of its children using its own markup. * * The children are collected using {@link #renderIterator()} method. This class will take care of * properly positioning and rewinding its markup stream so before each child renders it points to * the beginning of this component. Each child is rendered by a call to * {@link #renderChild(Component)}. A typical implementation simply does * <code>child.render(getMarkupStream());</code>. 
* * @author Igor Vaynberg (ivaynberg) */ public abstract class AbstractRepeater extends WebMarkupContainer { private static final long serialVersionUID = 1L; private static final Logger log = LoggerFactory.getLogger(AbstractRepeater.class); private static Pattern SAFE_CHILD_ID_PATTERN = Pattern.compile("^\\d+$"); /** * Constructor * * @param id */ public AbstractRepeater(String id) { super(id); } /** * Constructor * * @param id * @param model */ public AbstractRepeater(String id, IModel model) { super(id, model); } /** * Returns an iterator for the collection of child components to be rendered. Users can override * this to change order of rendered children. * * @return iterator over child components to be rendered */ protected abstract Iterator renderIterator(); /** * Renders all child items in no specified order * * @param markupStream * The markup stream */ protected final void onRender(final MarkupStream markupStream) { final int markupStart = markupStream.getCurrentIndex(); Iterator it = renderIterator(); if (it.hasNext()) { do { Component child = (Component)it.next(); if (child == null) { throw new IllegalStateException("the render iterator returned null for a child"); } markupStream.setCurrentIndex(markupStart); renderChild(child); } while (it.hasNext()); } else { markupStream.skipComponent(); } } /** * Render a single child. This method can be overridden to modify how a single child component * is rendered. 
* * @param child * Child component to be rendered */ protected void renderChild(final Component child) { child.render(getMarkupStream()); } /** * @see org.apache.wicket.Component#onBeforeRender() */ protected void onBeforeRender() { onPopulate(); if (Application.get().getConfigurationType().equals(Application.DEVELOPMENT)) { Iterator i = iterator(); while (i.hasNext()) { Component c = (Component)i.next(); Matcher matcher = SAFE_CHILD_ID_PATTERN.matcher(c.getId()); if (!matcher.matches()) { log.warn("Child component of repeater " + getClass().getName() + ":" + getId() + " has a non-safe child id of " + c.getId() + ". Safe child ids must be composed of digits only."); // do not flood the log break; } } } super.onBeforeRender(); } /** * Callback to let the repeater know it should populate itself with its items. */ protected abstract void onPopulate(); }
jdk-1.4/wicket/src/main/java/org/apache/wicket/markup/repeater/AbstractRepeater.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.markup.repeater; import java.util.Iterator; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.wicket.Component; import org.apache.wicket.markup.MarkupStream; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.model.IModel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Base class for repeaters. This container renders each of its children using its own markup. * * The children are collected using {@link #renderIterator()} method. This class will take care of * properly positioning and rewinding its markup stream so before each child renders it points to * the beginning of this component. Each child is rendered by a call to * {@link #renderChild(Component)}. A typical implementation simply does * <code>child.render(getMarkupStream());</code>. 
* * @author Igor Vaynberg (ivaynberg) */ public abstract class AbstractRepeater extends WebMarkupContainer { private static final long serialVersionUID = 1L; private static final Logger log = LoggerFactory.getLogger(AbstractRepeater.class); private static Pattern SAFE_CHILD_ID_PATTERN = Pattern.compile("^\\d+$"); /** * Constructor * * @param id */ public AbstractRepeater(String id) { super(id); } /** * Constructor * * @param id * @param model */ public AbstractRepeater(String id, IModel model) { super(id, model); } /** * Returns an iterator for the collection of child components to be rendered. Users can override * this to change order of rendered children. * * @return iterator over child components to be rendered */ protected abstract Iterator renderIterator(); /** * Renders all child items in no specified order * * @param markupStream * The markup stream */ protected final void onRender(final MarkupStream markupStream) { final int markupStart = markupStream.getCurrentIndex(); Iterator it = renderIterator(); if (it.hasNext()) { do { Component child = (Component)it.next(); if (child == null) { throw new IllegalStateException("the render iterator returned null for a child"); } markupStream.setCurrentIndex(markupStart); renderChild(child); } while (it.hasNext()); } else { markupStream.skipComponent(); } } /** * Render a single child. This method can be overridden to modify how a single child component * is rendered. 
* * @param child * Child component to be rendered */ protected void renderChild(final Component child) { child.render(getMarkupStream()); } /** * @see org.apache.wicket.Component#onBeforeRender() */ protected void onBeforeRender() { onPopulate(); // TODO possibly enable this only in development mode Iterator i = iterator(); while (i.hasNext()) { Component c = (Component)i.next(); Matcher matcher = SAFE_CHILD_ID_PATTERN.matcher(c.getId()); if (!matcher.matches()) { log.warn("Child component of repeater " + getClass().getName() + ":" + getId() + " has a non-safe child id of " + c.getId() + ". Safe child ids must be composed of digits only."); // do not flood the log break; } } super.onBeforeRender(); } /** * Callback to let the repeater know it should populate itself with its items. */ protected abstract void onPopulate(); }
implemented TODO so the check is only done in development git-svn-id: 5a74b5304d8e7e474561603514f78b697e5d94c4@589850 13f79535-47bb-0310-9956-ffa450edef68
jdk-1.4/wicket/src/main/java/org/apache/wicket/markup/repeater/AbstractRepeater.java
implemented TODO so the check is only done in development
Java
apache-2.0
8b20dfe38f420420624807beeeba6676bcee1bcd
0
AlexFalappa/nb-springboot,AlexFalappa/nb-springboot
/* * Copyright 2016 Alessandro Falappa. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.alexfalappa.nbspringboot.projects.initializr; import java.awt.Dimension; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.Insets; import java.awt.Rectangle; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import javax.swing.JCheckBox; import javax.swing.JLabel; import javax.swing.Scrollable; import com.fasterxml.jackson.databind.JsonNode; import static javax.swing.SwingConstants.HORIZONTAL; /** * Specialized scrollable panel to manage a list of checkboxes groups each containing two columns of checkboxes. * <p> * The panel is dynamically filled processing a JSON tree received from the Spring Initializr rest service. 
* * @author Alessandro Falappa */ public class BootDependenciesPanel extends javax.swing.JPanel implements Scrollable { private static final String PROP_VERSION_RANGE = "versionRange"; private static final String PROP_DESCRIPTION = "boot.description"; private static final int OUTER_GAP = 4; private static final int INNER_GAP = 2; private static final int INDENT = 10; private static final int GROUP_SPACE = 16; private static final int TOOLTIP_WIDTH = 40; private boolean initialized = false; private final List<JCheckBox> chkBoxes = new ArrayList<>(); public BootDependenciesPanel() { initComponents(); } public void init(JsonNode metaData) { JsonNode depArray = metaData.path("dependencies").path("values"); final int nodeNum = depArray.size(); // remove informative label if (nodeNum > 0) { this.remove(lNotInitialized); } // prepare dependencies checkboxes int row = 0; for (int i = 0; i < nodeNum; i++) { JsonNode gn = depArray.get(i); // group label JLabel lGroup = new JLabel(gn.path("name").asText()); lGroup.setFont(lGroup.getFont().deriveFont(Font.BOLD, lGroup.getFont().getSize() + 2)); this.add(lGroup, constraintsForGroupLabel(row)); row++; // starter checkboxes in two columns final JsonNode valArray = gn.path("values"); for (int j = 0; j < valArray.size(); j++) { // first column JsonNode dn = valArray.get(j); this.add(checkBoxForNode(dn), constraintsForFirstColumnCheckbox(row)); // second column (optional) if (++j < valArray.size()) { dn = valArray.get(j); this.add(checkBoxForNode(dn), constraintsForSecondColumnCheckbox(row)); } row++; } } initialized = true; } public String getSelectedDependenciesString() { StringBuilder sb = new StringBuilder(); for (JCheckBox ch : chkBoxes) { if (ch.isEnabled() && ch.isSelected()) { sb.append(ch.getName()).append(','); } } // remove last comma (if present) if (sb.length() > 0) { sb.setLength(sb.length() - 1); } return sb.toString(); } void setSelectedDependenciesString(String deps) { HashSet<String> hs = new 
HashSet<>(Arrays.asList(deps.split(","))); for (JCheckBox cb : chkBoxes) { cb.setSelected(hs.contains(cb.getName())); } } public List<String> getSelectedDependencies() { List<String> ret = new ArrayList<>(); for (JCheckBox ch : chkBoxes) { if (ch.isEnabled() && ch.isSelected()) { ret.add(ch.getName()); } } return ret; } void setSelectedDependencies(List<String> deps) { HashSet<String> hs = new HashSet<>(deps); for (JCheckBox cb : chkBoxes) { cb.setSelected(hs.contains(cb.getName())); } } @Override public Dimension getPreferredScrollableViewportSize() { Dimension size = getPreferredSize(); if (initialized) { size = new Dimension(size.width, size.height / 8); } return size; } @Override public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) { if (orientation == HORIZONTAL) { return getPreferredSize().width / 2; } else { return getPreferredSize().height / 24; } } @Override public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) { if (orientation == HORIZONTAL) { return getPreferredSize().width / 2; } else { return getPreferredSize().height / 8; } } @Override public boolean getScrollableTracksViewportWidth() { return false; } @Override public boolean getScrollableTracksViewportHeight() { return false; } private JCheckBox checkBoxForNode(JsonNode dn) { final String name = dn.path("name").asText(); final String id = dn.path("id").asText(); final String description = dn.path("description").asText(); final String versRange = dn.path("versionRange").asText(); JCheckBox ch = new JCheckBox(name); ch.setName(id); ch.putClientProperty(PROP_VERSION_RANGE, versRange); ch.putClientProperty(PROP_DESCRIPTION, description); chkBoxes.add(ch); return ch; } private GridBagConstraints constraintsForSecondColumnCheckbox(int row) { GridBagConstraints gbc; gbc = new java.awt.GridBagConstraints(); gbc.gridx = 1; gbc.gridy = row; gbc.insets = new Insets(INNER_GAP, INNER_GAP, 0, 0); gbc.anchor = 
GridBagConstraints.LINE_START; return gbc; } private GridBagConstraints constraintsForFirstColumnCheckbox(int row) { GridBagConstraints gbc; gbc = new java.awt.GridBagConstraints(); gbc.gridx = 0; gbc.gridy = row; gbc.insets = new Insets(INNER_GAP, INDENT, 0, 0); gbc.anchor = GridBagConstraints.LINE_START; return gbc; } private GridBagConstraints constraintsForGroupLabel(int row) { GridBagConstraints gbc = new java.awt.GridBagConstraints(); gbc.gridx = 0; gbc.gridy = row; gbc.gridwidth = 2; gbc.fill = GridBagConstraints.HORIZONTAL; gbc.insets = (row == 0) ? new Insets(OUTER_GAP, OUTER_GAP, 0, OUTER_GAP) : new Insets(GROUP_SPACE, OUTER_GAP, 0, OUTER_GAP); return gbc; } /** This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The content of this * method is always regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { lNotInitialized = new javax.swing.JLabel(); setLayout(new java.awt.GridBagLayout()); lNotInitialized.setText("Not initialized"); lNotInitialized.setEnabled(false); add(lNotInitialized, new java.awt.GridBagConstraints()); }// </editor-fold>//GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JLabel lNotInitialized; // End of variables declaration//GEN-END:variables void adaptToBootVersion(String bootVersion) { for (JCheckBox cb : chkBoxes) { String verRange = (String) cb.getClientProperty(PROP_VERSION_RANGE); String description = (String) cb.getClientProperty(PROP_DESCRIPTION); final boolean allowable = allowable(verRange, bootVersion); cb.setEnabled(allowable); cb.setToolTipText(prepTooltip(description, allowable, verRange)); } } private static boolean allowable(String verRange, String bootVersion) { boolean ret = true; if (verRange != null && !verRange.isEmpty()) { if (verRange.indexOf('[') >= 0 || 
verRange.indexOf('(') >= 0 || verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) { // bounded range String[] bounds = verRange.substring(1, verRange.length() - 1).split(","); // check there are two bounds if (bounds.length != 2) { return false; } // test various cases if (bootVersion.compareTo(bounds[0]) > 0 && bootVersion.compareTo(bounds[1]) < 0) { return true; } else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("[")) { return true; } else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("(")) { return false; } else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith("]")) { return true; } else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith(")")) { return false; } else { return false; } } else { // unbounded range return bootVersion.compareTo(verRange) >= 0; } } return ret; } private String prepTooltip(String description, boolean allowable, String versRange) { StringBuilder sb = new StringBuilder(wrap(description)); if (!allowable) { sb.append("<br/><i>").append(decode(versRange)).append("</i>"); } return sb.toString(); } private StringBuilder wrap(String description) { StringBuilder sb = new StringBuilder("<html>"); String[] words = description.split(" "); String w = words[0]; sb.append(w); int len = w.length(); for (int i = 1; i < words.length; i++) { w = words[i]; if (len + w.length() + 1 > TOOLTIP_WIDTH) { sb.append("<br/><br/>").append(w); len = w.length(); } else { sb.append(" ").append(w); len += w.length() + 1; } } return sb; } private String decode(String verRange) { StringBuilder sb = new StringBuilder(); if (verRange != null && !verRange.isEmpty()) { if (verRange.indexOf('[') >= 0 || verRange.indexOf('(') >= 0 || verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) { // bounded range String[] bounds = verRange.substring(1, verRange.length() - 1).split(","); // check there are two bounds if (bounds.length == 2) { sb.append(bounds[0]); if (verRange.startsWith("[")) { sb.append(" 
&lt;= "); } else if (verRange.startsWith("(")) { sb.append(" &lt; "); } sb.append("Boot version"); if (verRange.endsWith("]")) { sb.append(" &gt;= "); } else if (verRange.endsWith(")")) { sb.append(" &gt; "); } sb.append(bounds[1]); } } else { // unbounded range sb.append("Boot version &gt;= ").append(verRange); } } return sb.toString(); } }
src/main/java/com/github/alexfalappa/nbspringboot/projects/initializr/BootDependenciesPanel.java
/* * Copyright 2016 Alessandro Falappa. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.alexfalappa.nbspringboot.projects.initializr; import java.awt.Dimension; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.Insets; import java.awt.Rectangle; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import javax.swing.JCheckBox; import javax.swing.JLabel; import javax.swing.Scrollable; import com.fasterxml.jackson.databind.JsonNode; import static javax.swing.SwingConstants.HORIZONTAL; /** * Specialized scrollable panel to manage a list of checkboxes groups each containing two columns of checkboxes. * <p> * The panel is dynamically filled processing a JSON tree received from the Spring Initializr rest service. 
* * @author Alessandro Falappa */ public class BootDependenciesPanel extends javax.swing.JPanel implements Scrollable { private static final String PROP_VERSION_RANGE = "versionRange"; private static final String PROP_DESCRIPTION = "boot.description"; private static final int OUTER_GAP = 4; private static final int INNER_GAP = 2; private static final int INDENT = 10; private static final int GROUP_SPACE = 16; private static final int TOOLTIP_WIDTH = 40; private boolean initialized = false; private final List<JCheckBox> chkBoxes = new ArrayList<>(); public BootDependenciesPanel() { initComponents(); } public void init(JsonNode metaData) { JsonNode depArray = metaData.path("dependencies").path("values"); final int nodeNum = depArray.size(); // remove informative label if (nodeNum > 0) { this.remove(lNotInitialized); } // prepare dependencies checkboxes int row = 0; for (int i = 0; i < nodeNum; i++) { JsonNode gn = depArray.get(i); // group label JLabel lGroup = new JLabel(gn.path("name").asText()); lGroup.setFont(lGroup.getFont().deriveFont(Font.BOLD, lGroup.getFont().getSize() + 2)); this.add(lGroup, constraintsForGroupLabel(row)); row++; // starter checkboxes in two columns final JsonNode valArray = gn.path("values"); for (int j = 0; j < valArray.size(); j++) { // first column JsonNode dn = valArray.get(j); this.add(checkBoxForNode(dn), constraintsForFirstColumnCheckbox(row)); // second column (optional) if (++j < valArray.size()) { dn = valArray.get(j); this.add(checkBoxForNode(dn), constraintsForSecondColumnCheckbox(row)); } row++; } } initialized = true; } public String getSelectedDependenciesString() { StringBuilder sb = new StringBuilder(); for (JCheckBox ch : chkBoxes) { if (ch.isEnabled() && ch.isSelected()) { sb.append(ch.getName()).append(','); } } // remove last comma (if present) if (sb.length() > 0) { sb.setLength(sb.length() - 1); } return sb.toString(); } void setSelectedDependenciesString(String deps) { HashSet<String> hs = new 
HashSet<>(Arrays.asList(deps.split(","))); for (JCheckBox cb : chkBoxes) { cb.setSelected(hs.contains(cb.getName())); } } public List<String> getSelectedDependencies() { List<String> ret = new ArrayList<>(); for (JCheckBox ch : chkBoxes) { if (ch.isEnabled() && ch.isSelected()) { ret.add(ch.getName()); } } return ret; } void setSelectedDependencies(List<String> deps) { HashSet<String> hs = new HashSet<>(deps); for (JCheckBox cb : chkBoxes) { cb.setSelected(hs.contains(cb.getName())); } } @Override public Dimension getPreferredScrollableViewportSize() { Dimension size = getPreferredSize(); if (initialized) { size = new Dimension(size.width, size.height / 8); } return size; } @Override public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) { if (orientation == HORIZONTAL) { return getPreferredSize().width / 2; } else { return getPreferredSize().height / 24; } } @Override public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) { if (orientation == HORIZONTAL) { return getPreferredSize().width / 2; } else { return getPreferredSize().height / 8; } } @Override public boolean getScrollableTracksViewportWidth() { return false; } @Override public boolean getScrollableTracksViewportHeight() { return false; } private JCheckBox checkBoxForNode(JsonNode dn) { final String name = dn.path("name").asText(); final String id = dn.path("id").asText(); final String description = dn.path("description").asText(); final String versRange = dn.path("versionRange").asText(); JCheckBox ch = new JCheckBox(name); ch.setName(id); ch.putClientProperty(PROP_VERSION_RANGE, versRange); ch.putClientProperty(PROP_DESCRIPTION, description); chkBoxes.add(ch); return ch; } private GridBagConstraints constraintsForSecondColumnCheckbox(int row) { GridBagConstraints gbc; gbc = new java.awt.GridBagConstraints(); gbc.gridx = 1; gbc.gridy = row; gbc.insets = new Insets(INNER_GAP, INNER_GAP, 0, 0); gbc.anchor = 
GridBagConstraints.LINE_START; return gbc; } private GridBagConstraints constraintsForFirstColumnCheckbox(int row) { GridBagConstraints gbc; gbc = new java.awt.GridBagConstraints(); gbc.gridx = 0; gbc.gridy = row; gbc.insets = new Insets(INNER_GAP, INDENT, 0, 0); gbc.anchor = GridBagConstraints.LINE_START; return gbc; } private GridBagConstraints constraintsForGroupLabel(int row) { GridBagConstraints gbc = new java.awt.GridBagConstraints(); gbc.gridx = 0; gbc.gridy = row; gbc.gridwidth = 2; gbc.fill = GridBagConstraints.HORIZONTAL; gbc.insets = (row == 0) ? new Insets(OUTER_GAP, OUTER_GAP, 0, OUTER_GAP) : new Insets(GROUP_SPACE, OUTER_GAP, 0, OUTER_GAP); return gbc; } /** This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The content of this * method is always regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { lNotInitialized = new javax.swing.JLabel(); setLayout(new java.awt.GridBagLayout()); lNotInitialized.setText("Not initialized"); lNotInitialized.setEnabled(false); add(lNotInitialized, new java.awt.GridBagConstraints()); }// </editor-fold>//GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JLabel lNotInitialized; // End of variables declaration//GEN-END:variables void adaptToBootVersion(String bootVersion) { for (JCheckBox cb : chkBoxes) { String verRange = (String) cb.getClientProperty(PROP_VERSION_RANGE); String description = (String) cb.getClientProperty(PROP_DESCRIPTION); final boolean allowable = allowable(verRange, bootVersion); cb.setEnabled(allowable); cb.setToolTipText(prepTooltip(description, allowable, verRange)); } } private static boolean allowable(String verRange, String bootVersion) { boolean ret = true; if (verRange != null && !verRange.isEmpty()) { if (verRange.indexOf('[') >= 0 || 
verRange.indexOf('(') >= 0 || verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) { // bounded range String[] bounds = verRange.substring(1, verRange.length() - 1).split(","); // check there are two bounds if (bounds.length != 2) { return false; } // test various cases if (bootVersion.compareTo(bounds[0]) > 0 && bootVersion.compareTo(bounds[1]) < 0) { return true; } else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("[")) { return true; } else if (bootVersion.compareTo(bounds[0]) == 0 && verRange.startsWith("(")) { return false; } else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith("]")) { return true; } else if (bootVersion.compareTo(bounds[1]) == 0 && verRange.endsWith(")")) { return false; } else { return false; } } else { // unbounded range return bootVersion.compareTo(verRange) >= 0; } } return ret; } private String prepTooltip(String description, boolean allowable, String versRange) { StringBuilder sb = new StringBuilder(wrap(description)); if (!allowable) { sb.append("<br/><i>").append(decode(versRange)).append("</i>"); } return sb.toString(); } private StringBuilder wrap(String description) { StringBuilder sb = new StringBuilder("<html>"); String[] words = description.split(" "); String w = words[0]; sb.append(w); int len = w.length(); for (int i = 1; i < words.length; i++) { w = words[i]; if (len + w.length() + 1 > TOOLTIP_WIDTH) { sb.append("<br/>").append(w); len = w.length(); } else { sb.append(" ").append(w); len += w.length() + 1; } } return sb; } private String decode(String verRange) { StringBuilder sb = new StringBuilder(); if (verRange != null && !verRange.isEmpty()) { if (verRange.indexOf('[') >= 0 || verRange.indexOf('(') >= 0 || verRange.indexOf(']') >= 0 || verRange.indexOf(')') >= 0) { // bounded range String[] bounds = verRange.substring(1, verRange.length() - 1).split(","); // check there are two bounds if (bounds.length == 2) { sb.append(bounds[0]); if (verRange.startsWith("[")) { sb.append(" &lt;= 
"); } else if (verRange.startsWith("(")) { sb.append(" &lt; "); } sb.append("Version"); if (verRange.endsWith("]")) { sb.append(" &gt;= "); } else if (verRange.endsWith(")")) { sb.append(" &gt; "); } sb.append(bounds[1]); } } else { // unbounded range sb.append("Version &gt;= ").append(verRange); } } return sb.toString(); } }
Initializr project: tweaks to tooltips for disabled dependencies
src/main/java/com/github/alexfalappa/nbspringboot/projects/initializr/BootDependenciesPanel.java
Initializr project: tweaks to tooltips for disabled dependencies
Java
bsd-3-clause
a623fef0d073321a5f34eb23147baaa9cc2e9316
0
jgiannoules/proxstor,jgiannoules/proxstor
package com.giannoules.proxstor.knows; import com.giannoules.proxstor.ProxStorUtil; import com.giannoules.proxstor.api.Location; import com.giannoules.proxstor.exception.InvalidLocationId; import com.giannoules.proxstor.exception.InvalidModel; import com.giannoules.proxstor.exception.LocationAlreadyNearbyLocation; import com.giannoules.proxstor.location.LocationDao; import com.giannoules.proxstor.nearby.NearbyDao; import com.tinkerpop.blueprints.Edge; import java.net.URI; import java.net.URISyntaxException; import java.util.logging.Level; import java.util.logging.Logger; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.core.Response; public class NearbyLocationResource { private final String locIdA; private final String locIdB; private final Integer distanceVal; public NearbyLocationResource(String locIdA, String locIdB, Integer distanceVal) { this.locIdA = locIdA; this.locIdB = locIdB; this.distanceVal = distanceVal; } /* * test whether locA is within distance from locB */ @GET public Response getLocationsWithinDistance() { try { Location a = LocationDao.instance.get(locIdA); Location b = LocationDao.instance.get(locIdB); if ((a != null) && (b != null) && (a.getLatitude() != null) && (a.getLongitude() != null) && (b.getLatitude() != null) && (b.getLongitude() != null)) { Double actualDistance = LocationDao.instance.distanceBetweenLocations(a, b); if (actualDistance <= distanceVal) { return Response.noContent().build(); } } } catch (InvalidLocationId ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); } return Response.status(404).build(); } /* * establish the nearby relationship between locations with distance * * success - returns 201 (Created) with URI of new Nearby relationship * failure - returns 404 (Not Found) if either locID is invalid * returns 400 (Bad Request) if the Nearby is already established * returns 500 (Server Error) if the Nearby could 
be established * but URI building error occurred */ @POST public Response establishLocationNearby() { try { if (!NearbyDao.instance.addNearby(locIdA, locIdB, distanceVal)) { return Response.status(500).build(); } } catch (InvalidLocationId ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.status(404).build(); } catch (LocationAlreadyNearbyLocation ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.status(400).build(); } URI createdUri; try { createdUri = new URI("/locations/" + ProxStorUtil.cleanPath(locIdA) + "/nearby/" + distanceVal.toString() + "/" + ProxStorUtil.cleanPath(locIdB)); return Response.created(createdUri).build(); } catch (URISyntaxException ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.serverError().build(); } } /* * update distance of nearby relationship between locations * * success - returns 204 (No Content) * failure - returns 404 (Not Found) if either locID is invalid * returns 400 (Bad Request) if the nearby relationship was not already established */ @PUT public Response updateLocationNearby() { try { if (NearbyDao.instance.updateNearby(locIdA, locIdB, distanceVal)) { return Response.noContent().build(); } else { return Response.status(400).build(); } } catch (InvalidLocationId ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.status(404).build(); } } /* * remove the nearby relationship between locations * * note that distanceVal is ignored * * returns 204 (No Content) when successful * returns 404 (Not Found) if relationship was not already established or * the locIds are simply invalid */ @DELETE public Response removeLocationNearby() { try { if (NearbyDao.instance.removeNearby(locIdA, locIdB)) { return Response.noContent().build(); } } catch (InvalidLocationId ex) { 
Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); } return Response.status(404).build(); } }
src/proxstor-webapp/src/main/java/com/giannoules/proxstor/nearby/NearbyLocationResource.java
package com.giannoules.proxstor.knows; import com.giannoules.proxstor.ProxStorUtil; import com.giannoules.proxstor.exception.InvalidLocationId; import com.giannoules.proxstor.exception.InvalidModel; import com.giannoules.proxstor.exception.LocationAlreadyNearbyLocation; import com.giannoules.proxstor.nearby.NearbyDao; import com.tinkerpop.blueprints.Edge; import java.net.URI; import java.net.URISyntaxException; import java.util.logging.Level; import java.util.logging.Logger; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.core.Response; public class NearbyLocationResource { private final String locIdA; private final String locIdB; private final Integer distanceVal; public NearbyLocationResource(String locIdA, String locIdB, Integer distanceVal) { this.locIdA = locIdA; this.locIdB = locIdB; this.distanceVal = distanceVal; } /* * test whether locA is within distance from locB */ @GET public Response getLocationsWithinDistance() { try { try { Edge e = NearbyDao.instance.getNearby(locIdA, locIdB); Integer distance = e.getProperty("distance"); if ((distance != null) && (distance <= distanceVal)) { return Response.noContent().build(); } } catch (InvalidModel ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); } } catch (InvalidLocationId ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); } return Response.status(404).build(); } /* * establish the nearby relationship between locations with distance * * success - returns 201 (Created) with URI of new Nearby relationship * failure - returns 404 (Not Found) if either locID is invalid * returns 400 (Bad Request) if the Nearby is already established * returns 500 (Server Error) if the Nearby could be established * but URI building error occurred */ @POST public Response establishLocationNearby() { try { if (!NearbyDao.instance.addNearby(locIdA, locIdB, distanceVal)) { return 
Response.status(500).build(); } } catch (InvalidLocationId ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.status(404).build(); } catch (LocationAlreadyNearbyLocation ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.status(400).build(); } URI createdUri; try { createdUri = new URI("/locations/" + ProxStorUtil.cleanPath(locIdA) + "/nearby/" + distanceVal.toString() + "/" + ProxStorUtil.cleanPath(locIdB)); return Response.created(createdUri).build(); } catch (URISyntaxException ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.serverError().build(); } } /* * update distance of nearby relationship between locations * * success - returns 204 (No Content) * failure - returns 404 (Not Found) if either locID is invalid * returns 400 (Bad Request) if the nearby relationship was not already established */ @PUT public Response updateLocationNearby() { try { if (NearbyDao.instance.updateNearby(locIdA, locIdB, distanceVal)) { return Response.noContent().build(); } else { return Response.status(400).build(); } } catch (InvalidLocationId ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); return Response.status(404).build(); } } /* * remove the nearby relationship between locations * * note that distanceVal is ignored * * returns 204 (No Content) when successful * returns 404 (Not Found) if relationship was not already established or * the locIds are simply invalid */ @DELETE public Response removeLocationNearby() { try { if (NearbyDao.instance.removeNearby(locIdA, locIdB)) { return Response.noContent().build(); } } catch (InvalidLocationId ex) { Logger.getLogger(NearbyLocationResource.class.getName()).log(Level.SEVERE, null, ex); } return Response.status(404).build(); } }
updated getLocationWithinDistance to use LocationDao's distanceBetweenLocations
src/proxstor-webapp/src/main/java/com/giannoules/proxstor/nearby/NearbyLocationResource.java
updated getLocationWithinDistance to use LocationDao's distanceBetweenLocations
Java
bsd-3-clause
e0a42e220adf08179e857c6e102d7808a2494248
0
CBIIT/caaers,NCIP/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers
package gov.nih.nci.cabig.ctms.tools; import gov.nih.nci.cabig.caaers.security.SecurityTestUtils; import gov.nih.nci.cabig.ctms.audit.domain.DataAuditInfo; import gov.nih.nci.security.acegi.csm.authorization.AuthorizationSwitch; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.commons.lang.StringUtils; import org.springframework.beans.factory.annotation.Required; import org.springframework.mock.jndi.SimpleNamingContextBuilder; import org.springframework.test.AbstractTransactionalSpringContextTests; import javax.naming.NamingException; import java.io.File; import java.util.Date; public class XLstudyImporterTest extends AbstractTransactionalSpringContextTests { private XLstudyImporter XLstudyImporter; private static String fileName; public XLstudyImporterTest(String testName) { super(testName); } public static void main(final String[] args) { if (StringUtils.isBlank(args[0])) { System.out.println("no file name specified"); return; } fileName = args[0]; System.out.println("file name :" + args[0]); junit.textui.TestRunner.run(suite()); } public static Test suite() { TestSuite suite = new TestSuite(); /* * NOTE: These tests CANNOT be run in succession because it will cause the maximum number of connections to be exceeded. */ suite.addTest(new XLstudyImporterTest("testImport")); return suite; } public void testImport() { try { File inputFile = new File(fileName); XLstudyImporter.importXLstudy(inputFile); setComplete(); endTransaction(); } catch (Exception ex) { System.out.println("\n Error occured: "); ex.printStackTrace(); } } @Override protected void onSetUpInTransaction() throws Exception { super.onSetUpInTransaction(); //To change body of overridden methods use File | Settings | File Templates. 
try { SimpleNamingContextBuilder.emptyActivatedContextBuilder(); } catch (NamingException e) { throw new RuntimeException("", e); } SecurityTestUtils.switchToSuperuser(); AuthorizationSwitch sw = (AuthorizationSwitch) applicationContext.getBean("authorizationSwitch"); if (sw == null) throw new RuntimeException("Authorization switch not found"); boolean current = sw.isOn(); sw.setOn(false); String identity = "ANONYMOUS"; String info = "importStudy"; gov.nih.nci.cabig.ctms.audit.domain.DataAuditInfo.setLocal(new DataAuditInfo(identity, "localhost", new Date(), info)); } @Override public String[] getConfigLocations() { return new String[]{ "classpath*:gov/nih/nci/cabig/caaers/applicationContext-configProperties.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-dao.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-spring.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-security.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-service.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-test-security.xml", "classpath*:applicationContext-test.xml" }; } @Required public void setXLstudyImporter(XLstudyImporter XLstudyImporter) { this.XLstudyImporter = XLstudyImporter; } }
projects/core/src/test/java/gov/nih/nci/cabig/ctms/tools/XLstudyImporterTest.java
package gov.nih.nci.cabig.ctms.tools; import gov.nih.nci.cabig.caaers.security.SecurityTestUtils; import gov.nih.nci.cabig.ctms.audit.domain.DataAuditInfo; import gov.nih.nci.security.acegi.csm.authorization.AuthorizationSwitch; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.commons.lang.StringUtils; import org.springframework.beans.factory.annotation.Required; import org.springframework.mock.jndi.SimpleNamingContextBuilder; import org.springframework.test.AbstractTransactionalSpringContextTests; import javax.naming.NamingException; import java.io.File; import java.util.Date; public class XLstudyImporterTest extends AbstractTransactionalSpringContextTests { private XLstudyImporter XLstudyImporter; private static String fileName; public XLstudyImporterTest(String testName) { super(testName); } public static void main(final String[] args) { if (StringUtils.isBlank(args[0])) { System.out.println("no file name specified"); return; } fileName = args[0]; System.out.println("file name :" + args[0]); junit.textui.TestRunner.run(suite()); } public static Test suite() { TestSuite suite = new TestSuite(); /* * NOTE: These tests CANNOT be run in succession because it will cause the maximum number of connections to be exceeded. */ suite.addTest(new XLstudyImporterTest("testImport")); // suite.addTest(new PSCAdverseEventConsumerTest("testCreateNotificationLocal")); return suite; } public void testImport() { try { File inputFile = new File(fileName); XLstudyImporter.importXLstudy(inputFile); setComplete(); endTransaction(); } catch (Exception ex) { System.out.println("\n Error occured: "); ex.printStackTrace(); } } @Override protected void onSetUpInTransaction() throws Exception { super.onSetUpInTransaction(); //To change body of overridden methods use File | Settings | File Templates. 
try { SimpleNamingContextBuilder.emptyActivatedContextBuilder(); } catch (NamingException e) { throw new RuntimeException("", e); } SecurityTestUtils.switchToSuperuser(); AuthorizationSwitch sw = (AuthorizationSwitch) applicationContext.getBean("authorizationSwitch"); if (sw == null) throw new RuntimeException("Authorization switch not found"); boolean current = sw.isOn(); sw.setOn(false); String identity = "ANONYMOUS"; String info = "importStudy"; gov.nih.nci.cabig.ctms.audit.domain.DataAuditInfo.setLocal(new DataAuditInfo(identity, "localhost", new Date(), info)); } @Override public String[] getConfigLocations() { // return new String[]{ // "classpath*:gov/nih/nci/cabig/caaers/applicationContext-*.xml", // "classpath*:applicationContext-test.xml" // }; return new String[]{ "classpath*:gov/nih/nci/cabig/caaers/applicationContext-configProperties.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-dao.xml", //"classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-db.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-spring.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-security.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-core-service.xml", "classpath*:gov/nih/nci/cabig/caaers/applicationContext-test-security.xml", "classpath*:applicationContext-test.xml" }; } @Required public void setXLstudyImporter(XLstudyImporter XLstudyImporter) { this.XLstudyImporter = XLstudyImporter; } }
SVN-Revision: 6106
projects/core/src/test/java/gov/nih/nci/cabig/ctms/tools/XLstudyImporterTest.java
Java
bsd-3-clause
bf285c86912e13a075c4d2bc6ab61b6af079049b
0
NCIP/webgenome,NCIP/webgenome,NCIP/webgenome,NCIP/webgenome
/* $Revision: 1.7 $ $Date: 2008-02-22 18:24:44 $ The Web CGH Software License, Version 1.0 Copyright 2003 RTI. This software was developed in conjunction with the National Cancer Institute, and so to the extent government employees are co-authors, any rights in such works shall be subject to Title 17 of the United States Code, section 105. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the disclaimer of Article 3, below. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This product includes software developed by the RTI and the National Cancer Institute." If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself, wherever such third-party acknowledgments normally appear. 3. The names "The National Cancer Institute", "NCI", Research Triangle Institute, and "RTI" must not be used to endorse or promote products derived from this software. 4. This license does not authorize the incorporation of this software into any proprietary programs. This license does not authorize the recipient to use any trademarks owned by either NCI or RTI. 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, RTI, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.rti.webgenome.service.job; import java.util.Set; import org.apache.log4j.Logger; import org.rti.webgenome.analysis.AnalyticException; import org.rti.webgenome.domain.Experiment; import org.rti.webgenome.domain.Plot; import org.rti.webgenome.domain.ShoppingCart; import org.rti.webgenome.service.analysis.AnalysisService; import org.rti.webgenome.service.analysis.SerializedDataTransformer; import org.rti.webgenome.service.session.WebGenomeDbService; import org.rti.webgenome.service.util.SerializedChromosomeArrayDataGetter; /** * A job to re-run all producing analytic operations on * all derived experiments in a single plot. * @author dhall * */ public class ReRunAnalysisOnPlotExperimentsJob extends AbstractJob { /** Logger. */ private static final Logger LOGGER = Logger.getLogger( ReRunAnalysisOnPlotExperimentsJob.class); // // A T T R I B U T E S // /** * Experiments that will be re-generated. All * experiments must be derived from an analytic * operation. Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. */ private Set<Experiment> experiments = null; /** ID of plot to re-create. */ private Long plotId = null; // // G E T T E R S / S E T T E R S // /** * Get experiments that will be re-generated. All * experiments must be derived from an analytic * operation. 
Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. * @return Experiments that will be re-generated. */ public Set<Experiment> getExperiments() { return experiments; } /** * Set experiments that will be re-generated. All * experiments must be derived from an analytic * operation. Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. * @param experiments Experiments that will be re-generated */ public void setExperiments(final Set<Experiment> experiments) { this.experiments = experiments; } /** * Get ID of plot to regenerate. * @return Plot primary key ID */ public Long getPlotId() { return plotId; } /** * Set ID of plot to regenerate. * @param plotId Primary key ID */ public void setPlotId(final Long plotId) { this.plotId = plotId; } // // C O N S T R U C T O R S // /** * Constructor. This should only be used by the * persistence framework. */ public ReRunAnalysisOnPlotExperimentsJob() { } /** * Constructor. * @param experiments Experiments that will be re-generated. All * experiments must be derived from an analytic * operation. Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. * @param plotId Primary key ID of plot to regenerate * @param userId User login name * @param userDomain Domain in which user name applies. 
*/ public ReRunAnalysisOnPlotExperimentsJob( final Set<Experiment> experiments, final Long plotId, final Long userId, final String userDomain) { super(userId, userDomain); this.experiments = experiments; this.plotId = plotId; StringBuffer buff = new StringBuffer( "Regenerating plot with experiments "); int count = 0; for (Experiment exp : experiments) { if (count++ > 0) { buff.append(" ,"); } buff.append(exp.getName()); } this.setDescription(buff.toString()); } // // O V E R R I D E S // /** * {@inheritDoc} */ @Override public void execute(final JobServices jobServices) { SerializedDataTransformer transformer = jobServices.getIoService().getSerializedDataTransformer(); AnalysisService aService = jobServices.getAnalysisService(); WebGenomeDbService dbService = jobServices.getWebGenomeDbService(); ShoppingCart cart = dbService.loadShoppingCart(this.getUserId(), this.getUserDomain()); SerializedChromosomeArrayDataGetter dataGetter = jobServices.getIoService().getSerializedChromosomeArrayDataGetter(); try { LOGGER.info("Plot re-analysis job starting for user " + this.getUserId()); // Re-do analytic operation Set<String> replacedFiles = aService.rePerformAnalyticOperation( this.experiments, transformer); // Plot Plot plot = cart.getPlot(this.plotId); jobServices.getPlotService().plotExperiments(plot, this.experiments, plot.getPlotParameters(), cart, dataGetter); // Persist dbService.updateExperimentsAndCart(this.experiments, cart); jobServices.getIoService().deleteDataFiles(replacedFiles); this.setTerminationMessage(Job.JOB_EXECUTION_SUCCESS_MESSAGE); LOGGER.info("Plot re-analysis job completed for user " + this.getUserId()); } catch (AnalyticException e) { this.setTerminationMessage( Job.JOB_EXECUTION_FAILURE_MESSAGE + ": " + e.getMessage()); LOGGER.info("Plot re-analysis job failed for user " + this.getUserId()); LOGGER.info(e); e.printStackTrace(); } } }
java/core/src/org/rti/webgenome/service/job/ReRunAnalysisOnPlotExperimentsJob.java
/* $Revision: 1.7 $ $Date: 2008-02-22 18:24:44 $ The Web CGH Software License, Version 1.0 Copyright 2003 RTI. This software was developed in conjunction with the National Cancer Institute, and so to the extent government employees are co-authors, any rights in such works shall be subject to Title 17 of the United States Code, section 105. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the disclaimer of Article 3, below. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This product includes software developed by the RTI and the National Cancer Institute." If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself, wherever such third-party acknowledgments normally appear. 3. The names "The National Cancer Institute", "NCI", Research Triangle Institute, and "RTI" must not be used to endorse or promote products derived from this software. 4. This license does not authorize the incorporation of this software into any proprietary programs. This license does not authorize the recipient to use any trademarks owned by either NCI or RTI. 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, RTI, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.rti.webgenome.service.job; import java.util.Set; import org.apache.log4j.Logger; import org.rti.webgenome.analysis.AnalyticException; import org.rti.webgenome.domain.Experiment; import org.rti.webgenome.domain.Plot; import org.rti.webgenome.domain.ShoppingCart; import org.rti.webgenome.service.analysis.AnalysisService; import org.rti.webgenome.service.analysis.SerializedDataTransformer; import org.rti.webgenome.service.session.WebGenomeDbService; import org.rti.webgenome.service.util.SerializedChromosomeArrayDataGetter; /** * A job to re-run all producing analytic operations on * all derived experiments in a single plot. * @author dhall * */ public class ReRunAnalysisOnPlotExperimentsJob extends AbstractJob { /** Logger. */ private static final Logger LOGGER = Logger.getLogger( ReRunAnalysisOnPlotExperimentsJob.class); // // A T T R I B U T E S // /** * Experiments that will be re-generated. All * experiments must be derived from an analytic * operation. Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. */ private Set<Experiment> experiments = null; /** ID of plot to re-create. */ private Long plotId = null; // // G E T T E R S / S E T T E R S // /** * Get experiments that will be re-generated. All * experiments must be derived from an analytic * operation. 
Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. * @return Experiments that will be re-generated. */ public Set<Experiment> getExperiments() { return experiments; } /** * Set experiments that will be re-generated. All * experiments must be derived from an analytic * operation. Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. * @param experiments Experiments that will be re-generated */ public void setExperiments(final Set<Experiment> experiments) { this.experiments = experiments; } /** * Get ID of plot to regenerate. * @return Plot primary key ID */ public Long getPlotId() { return plotId; } /** * Set ID of plot to regenerate. * @param plotId Primary key ID */ public void setPlotId(final Long plotId) { this.plotId = plotId; } // // C O N S T R U C T O R S // /** * Constructor. This should only be used by the * persistence framework. */ public ReRunAnalysisOnPlotExperimentsJob() { } /** * Constructor. * @param experiments Experiments that will be re-generated. All * experiments must be derived from an analytic * operation. Furthermore, the data source property * in each experiment should include new user-specified * analytic operation parameter values. * @param plotId Primary key ID of plot to regenerate * @param userId User login name * @param userDomain Domain in which user name applies. 
*/ public ReRunAnalysisOnPlotExperimentsJob( final Set<Experiment> experiments, final Long plotId, final String userId, final String userDomain) { super(userId, userDomain); this.experiments = experiments; this.plotId = plotId; StringBuffer buff = new StringBuffer( "Regenerating plot with experiments "); int count = 0; for (Experiment exp : experiments) { if (count++ > 0) { buff.append(" ,"); } buff.append(exp.getName()); } this.setDescription(buff.toString()); } // // O V E R R I D E S // /** * {@inheritDoc} */ @Override public void execute(final JobServices jobServices) { SerializedDataTransformer transformer = jobServices.getIoService().getSerializedDataTransformer(); AnalysisService aService = jobServices.getAnalysisService(); WebGenomeDbService dbService = jobServices.getWebGenomeDbService(); ShoppingCart cart = dbService.loadShoppingCart(this.getUserId(), this.getUserDomain()); SerializedChromosomeArrayDataGetter dataGetter = jobServices.getIoService().getSerializedChromosomeArrayDataGetter(); try { LOGGER.info("Plot re-analysis job starting for user " + this.getUserId()); // Re-do analytic operation Set<String> replacedFiles = aService.rePerformAnalyticOperation( this.experiments, transformer); // Plot Plot plot = cart.getPlot(this.plotId); jobServices.getPlotService().plotExperiments(plot, this.experiments, plot.getPlotParameters(), cart, dataGetter); // Persist dbService.updateExperimentsAndCart(this.experiments, cart); jobServices.getIoService().deleteDataFiles(replacedFiles); this.setTerminationMessage(Job.JOB_EXECUTION_SUCCESS_MESSAGE); LOGGER.info("Plot re-analysis job completed for user " + this.getUserId()); } catch (AnalyticException e) { this.setTerminationMessage( Job.JOB_EXECUTION_FAILURE_MESSAGE + ": " + e.getMessage()); LOGGER.info("Plot re-analysis job failed for user " + this.getUserId()); LOGGER.info(e); e.printStackTrace(); } } }
#19706 - changed UserId from String to Long SVN-Revision: 13982
java/core/src/org/rti/webgenome/service/job/ReRunAnalysisOnPlotExperimentsJob.java
#19706 - changed UserId from String to Long
Java
bsd-3-clause
c0cfdccc411c357ca659ba48227ae8765cbf610f
0
NCIP/caintegrator,NCIP/caintegrator,NCIP/caintegrator,NCIP/caintegrator,NCIP/caintegrator
/** * The software subject to this notice and license includes both human readable * source code form and machine readable, binary, object code form. The caIntegrator2 * Software was developed in conjunction with the National Cancer Institute * (NCI) by NCI employees, 5AM Solutions, Inc. (5AM), ScenPro, Inc. (ScenPro) * and Science Applications International Corporation (SAIC). To the extent * government employees are authors, any rights in such works shall be subject * to Title 17 of the United States Code, section 105. * * This caIntegrator2 Software License (the License) is between NCI and You. You (or * Your) shall mean a person or an entity, and all other entities that control, * are controlled by, or are under common control with the entity. Control for * purposes of this definition means (i) the direct or indirect power to cause * the direction or management of such entity, whether by contract or otherwise, * or (ii) ownership of fifty percent (50%) or more of the outstanding shares, * or (iii) beneficial ownership of such entity. * * This License is granted provided that You agree to the conditions described * below. NCI grants You a non-exclusive, worldwide, perpetual, fully-paid-up, * no-charge, irrevocable, transferable and royalty-free right and license in * its rights in the caIntegrator2 Software to (i) use, install, access, operate, * execute, copy, modify, translate, market, publicly display, publicly perform, * and prepare derivative works of the caIntegrator2 Software; (ii) distribute and * have distributed to and by third parties the caIntegrator2 Software and any * modifications and derivative works thereof; and (iii) sublicense the * foregoing rights set out in (i) and (ii) to third parties, including the * right to license such rights to further third parties. 
For sake of clarity, * and not by way of limitation, NCI shall have no right of accounting or right * of payment from You or Your sub-licensees for the rights granted under this * License. This License is granted at no charge to You. * * Your redistributions of the source code for the Software must retain the * above copyright notice, this list of conditions and the disclaimer and * limitation of liability of Article 6, below. Your redistributions in object * code form must reproduce the above copyright notice, this list of conditions * and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * * Your end-user documentation included with the redistribution, if any, must * include the following acknowledgment: This product includes software * developed by 5AM, ScenPro, SAIC and the National Cancer Institute. If You do * not include such end-user documentation, You shall include this acknowledgment * in the Software itself, wherever such third-party acknowledgments normally * appear. * * You may not use the names "The National Cancer Institute", "NCI", "ScenPro", * "SAIC" or "5AM" to endorse or promote products derived from this Software. * This License does not authorize You to use any trademarks, service marks, * trade names, logos or product names of either NCI, ScenPro, SAID or 5AM, * except as required to comply with the terms of this License. * * For sake of clarity, and not by way of limitation, You may incorporate this * Software into Your proprietary programs and into any third party proprietary * programs. 
However, if You incorporate the Software into third party * proprietary programs, You agree that You are solely responsible for obtaining * any permission from such third parties required to incorporate the Software * into such third party proprietary programs and for informing Your a * sub-licensees, including without limitation Your end-users, of their * obligation to secure any required permissions from such third parties before * incorporating the Software into such third party proprietary software * programs. In the event that You fail to obtain such permissions, You agree * to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such * permissions. * * For sake of clarity, and not by way of limitation, You may add Your own * copyright statement to Your modifications and to the derivative works, and * You may provide additional or different license terms and conditions in Your * sublicenses of modifications of the Software, or any derivative works of the * Software as a whole, provided Your use, reproduction, and distribution of the * Work otherwise complies with the conditions stated in this License. * * THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, * (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, * NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
IN NO * EVENT SHALL THE NATIONAL CANCER INSTITUTE, 5AM SOLUTIONS, INC., SCENPRO, INC., * SCIENCE APPLICATIONS INTERNATIONAL CORPORATION OR THEIR * AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package gov.nih.nci.caintegrator2.application.study; import gov.nih.nci.caintegrator2.domain.genomic.Sample; import gov.nih.nci.caintegrator2.domain.genomic.SampleAcquisition; import gov.nih.nci.caintegrator2.domain.translational.StudySubjectAssignment; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import au.com.bytecode.opencsv.CSVReader; /** * Helper class used to map samples to subjects. */ class SampleMappingHelper { private final StudyConfiguration studyConfiguration; private final File mappingFile; private Map<String, Sample> sampleNameMap; SampleMappingHelper(StudyConfiguration studyConfiguration, File mappingFile) { this.studyConfiguration = studyConfiguration; this.mappingFile = mappingFile; } void mapSamples() { try { CSVReader reader = new CSVReader(new FileReader(mappingFile)); String[] values; while ((values = reader.readNext()) != null) { String subjectIdentifier = values[0]; String sampleName = values[1]; StudySubjectAssignment sja = getSubjectAssignment(subjectIdentifier); // map is throwing an exception. This is a temporary check for null to prevent it. 
if (!(sja == null)) { map(sja, getSample(sampleName)); } } } catch (IOException e) { throw new IllegalStateException("Unexpected IO error", e); } } private void map(StudySubjectAssignment subjectAssignment, Sample sample) { SampleAcquisition sampleAcquisition = new SampleAcquisition(); sampleAcquisition.setSample(sample); if (subjectAssignment.getSampleAcquisitionCollection() == null) { subjectAssignment.setSampleAcquisitionCollection(new HashSet<SampleAcquisition>()); } subjectAssignment.getSampleAcquisitionCollection().add(sampleAcquisition); } private Sample getSample(String sampleName) { return getSampleNameMap().get(sampleName); } private Map<String, Sample> getSampleNameMap() { if (sampleNameMap == null) { sampleNameMap = createSampleNameMap(); } return sampleNameMap; } private Map<String, Sample> createSampleNameMap() { sampleNameMap = new HashMap<String, Sample>(); for (GenomicDataSourceConfiguration sourceConfiguration : studyConfiguration.getGenomicDataSources()) { for (Sample sample : sourceConfiguration.getSamples()) { sampleNameMap.put(sample.getName(), sample); } } return sampleNameMap; } private StudySubjectAssignment getSubjectAssignment(String subjectIdentifier) { return studyConfiguration.getSubjectAssignment(subjectIdentifier); } }
caintegrator2-war/src/gov/nih/nci/caintegrator2/application/study/SampleMappingHelper.java
/** * The software subject to this notice and license includes both human readable * source code form and machine readable, binary, object code form. The caIntegrator2 * Software was developed in conjunction with the National Cancer Institute * (NCI) by NCI employees, 5AM Solutions, Inc. (5AM), ScenPro, Inc. (ScenPro) * and Science Applications International Corporation (SAIC). To the extent * government employees are authors, any rights in such works shall be subject * to Title 17 of the United States Code, section 105. * * This caIntegrator2 Software License (the License) is between NCI and You. You (or * Your) shall mean a person or an entity, and all other entities that control, * are controlled by, or are under common control with the entity. Control for * purposes of this definition means (i) the direct or indirect power to cause * the direction or management of such entity, whether by contract or otherwise, * or (ii) ownership of fifty percent (50%) or more of the outstanding shares, * or (iii) beneficial ownership of such entity. * * This License is granted provided that You agree to the conditions described * below. NCI grants You a non-exclusive, worldwide, perpetual, fully-paid-up, * no-charge, irrevocable, transferable and royalty-free right and license in * its rights in the caIntegrator2 Software to (i) use, install, access, operate, * execute, copy, modify, translate, market, publicly display, publicly perform, * and prepare derivative works of the caIntegrator2 Software; (ii) distribute and * have distributed to and by third parties the caIntegrator2 Software and any * modifications and derivative works thereof; and (iii) sublicense the * foregoing rights set out in (i) and (ii) to third parties, including the * right to license such rights to further third parties. 
For sake of clarity, * and not by way of limitation, NCI shall have no right of accounting or right * of payment from You or Your sub-licensees for the rights granted under this * License. This License is granted at no charge to You. * * Your redistributions of the source code for the Software must retain the * above copyright notice, this list of conditions and the disclaimer and * limitation of liability of Article 6, below. Your redistributions in object * code form must reproduce the above copyright notice, this list of conditions * and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * * Your end-user documentation included with the redistribution, if any, must * include the following acknowledgment: This product includes software * developed by 5AM, ScenPro, SAIC and the National Cancer Institute. If You do * not include such end-user documentation, You shall include this acknowledgment * in the Software itself, wherever such third-party acknowledgments normally * appear. * * You may not use the names "The National Cancer Institute", "NCI", "ScenPro", * "SAIC" or "5AM" to endorse or promote products derived from this Software. * This License does not authorize You to use any trademarks, service marks, * trade names, logos or product names of either NCI, ScenPro, SAID or 5AM, * except as required to comply with the terms of this License. * * For sake of clarity, and not by way of limitation, You may incorporate this * Software into Your proprietary programs and into any third party proprietary * programs. 
However, if You incorporate the Software into third party * proprietary programs, You agree that You are solely responsible for obtaining * any permission from such third parties required to incorporate the Software * into such third party proprietary programs and for informing Your a * sub-licensees, including without limitation Your end-users, of their * obligation to secure any required permissions from such third parties before * incorporating the Software into such third party proprietary software * programs. In the event that You fail to obtain such permissions, You agree * to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such * permissions. * * For sake of clarity, and not by way of limitation, You may add Your own * copyright statement to Your modifications and to the derivative works, and * You may provide additional or different license terms and conditions in Your * sublicenses of modifications of the Software, or any derivative works of the * Software as a whole, provided Your use, reproduction, and distribution of the * Work otherwise complies with the conditions stated in this License. * * THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, * (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, * NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
IN NO * EVENT SHALL THE NATIONAL CANCER INSTITUTE, 5AM SOLUTIONS, INC., SCENPRO, INC., * SCIENCE APPLICATIONS INTERNATIONAL CORPORATION OR THEIR * AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package gov.nih.nci.caintegrator2.application.study; import gov.nih.nci.caintegrator2.domain.genomic.Sample; import gov.nih.nci.caintegrator2.domain.genomic.SampleAcquisition; import gov.nih.nci.caintegrator2.domain.translational.StudySubjectAssignment; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import au.com.bytecode.opencsv.CSVReader; /** * Helper class used to map samples to subjects. 
*/
class SampleMappingHelper {

    private final StudyConfiguration studyConfiguration;
    private final File mappingFile;
    // Lazily built index of sample name -> Sample across all genomic data sources.
    private Map<String, Sample> sampleNameMap;

    SampleMappingHelper(StudyConfiguration studyConfiguration, File mappingFile) {
        this.studyConfiguration = studyConfiguration;
        this.mappingFile = mappingFile;
    }

    /**
     * Reads the mapping file (CSV, one row per mapping: subject identifier in
     * column 0, sample name in column 1) and attaches each named sample to its
     * subject via a new <code>SampleAcquisition</code>.
     *
     * @throws IllegalStateException if the file cannot be read, a row has fewer
     *         than two columns, or a row references a subject or sample that is
     *         not part of this study configuration. Failing fast with a message
     *         (rather than a NullPointerException deep inside map()) makes bad
     *         mapping files debuggable.
     */
    void mapSamples() {
        try {
            CSVReader reader = new CSVReader(new FileReader(mappingFile));
            try {
                String[] values;
                while ((values = reader.readNext()) != null) {
                    if (values.length < 2) {
                        throw new IllegalStateException(
                                "Malformed mapping row; expected 2 columns but found " + values.length);
                    }
                    String subjectIdentifier = values[0];
                    String sampleName = values[1];
                    StudySubjectAssignment subjectAssignment = getSubjectAssignment(subjectIdentifier);
                    if (subjectAssignment == null) {
                        throw new IllegalStateException(
                                "No subject assignment found for identifier " + subjectIdentifier);
                    }
                    Sample sample = getSample(sampleName);
                    if (sample == null) {
                        throw new IllegalStateException("No sample found with name " + sampleName);
                    }
                    map(subjectAssignment, sample);
                }
            } finally {
                // The reader (and its underlying FileReader) was previously leaked;
                // close it even when parsing fails. A close failure surfaces through
                // the outer IOException handler.
                reader.close();
            }
        } catch (IOException e) {
            throw new IllegalStateException("Unexpected IO error", e);
        }
    }

    // Attaches the sample to the subject through a new SampleAcquisition,
    // lazily creating the acquisition collection when the subject has none yet.
    private void map(StudySubjectAssignment subjectAssignment, Sample sample) {
        SampleAcquisition sampleAcquisition = new SampleAcquisition();
        sampleAcquisition.setSample(sample);
        if (subjectAssignment.getSampleAcquisitionCollection() == null) {
            subjectAssignment.setSampleAcquisitionCollection(new HashSet<SampleAcquisition>());
        }
        subjectAssignment.getSampleAcquisitionCollection().add(sampleAcquisition);
    }

    // Looks a sample up by name; returns null when the name is unknown.
    private Sample getSample(String sampleName) {
        return getSampleNameMap().get(sampleName);
    }

    private Map<String, Sample> getSampleNameMap() {
        if (sampleNameMap == null) {
            sampleNameMap = createSampleNameMap();
        }
        return sampleNameMap;
    }

    // Indexes every sample of every genomic data source by its name.
    // NOTE(review): if two sources contain samples with the same name, the
    // later one silently wins — confirm whether names are unique per study.
    private Map<String, Sample> createSampleNameMap() {
        sampleNameMap = new HashMap<String, Sample>();
        for (GenomicDataSourceConfiguration sourceConfiguration : studyConfiguration.getGenomicDataSources()) {
            for (Sample sample : sourceConfiguration.getSamples()) {
                sampleNameMap.put(sample.getName(), sample);
            }
        }
        return sampleNameMap;
    }

    private StudySubjectAssignment getSubjectAssignment(String subjectIdentifier) {
        return studyConfiguration.getSubjectAssignment(subjectIdentifier);
    }
}
Mapping the sample mapping file is causing an exception. I'm breaking out the map method as an intermediate step and checking for a null so that this can be debugged.
caintegrator2-war/src/gov/nih/nci/caintegrator2/application/study/SampleMappingHelper.java
Mapping the sample mapping file is causing an exception. I'm breaking out the map method as an intermediate step and checking for a null so that this can be debugged.
Java
bsd-3-clause
4e1d2212f2f924862b69976b25db364ed61c061a
0
ValentinMinder/pocketcampus,ValentinMinder/pocketcampus,ValentinMinder/pocketcampus,ValentinMinder/pocketcampus,ValentinMinder/pocketcampus,ValentinMinder/pocketcampus,ValentinMinder/pocketcampus,ValentinMinder/pocketcampus,ValentinMinder/pocketcampus
package org.pocketcampus.plugin.moodle.server;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLDecoder;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.LinkedList;
import java.util.Locale;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.thrift.TException;
import org.eclipse.jetty.util.MultiMap;
import org.eclipse.jetty.util.UrlEncoded;
import org.pocketcampus.plugin.authentication.shared.TequilaToken;
import org.pocketcampus.plugin.authentication.shared.utils.Cookie;
import org.pocketcampus.plugin.moodle.shared.CoursesListReply;
import org.pocketcampus.plugin.moodle.shared.EventsListReply;
import org.pocketcampus.plugin.moodle.shared.MoodleAssignment;
import org.pocketcampus.plugin.moodle.shared.MoodleCourse;
import org.pocketcampus.plugin.moodle.shared.MoodleEvent;
import org.pocketcampus.plugin.moodle.shared.MoodleEventType;
import org.pocketcampus.plugin.moodle.shared.MoodleRequest;
import org.pocketcampus.plugin.moodle.shared.MoodleResource;
import org.pocketcampus.plugin.moodle.shared.MoodleSection;
import org.pocketcampus.plugin.moodle.shared.MoodleService;
import org.pocketcampus.plugin.moodle.shared.MoodleSession;
import org.pocketcampus.plugin.moodle.shared.MoodleUserEvent;
import org.pocketcampus.plugin.moodle.shared.SectionsListReply;

/**
 * MoodleServiceImpl
 *
 * The implementation of the server side of the Moodle Plugin.
 *
 * It fetches the user's Moodle data from the Moodle servers.
 *
 * NOTE: this service works by screen-scraping the Moodle HTML pages with
 * hard-coded marker strings; every method here is tightly coupled to the
 * exact markup of moodle.epfl.ch and will break if that markup changes.
 *
 * @author Amer <[email protected]>
 *
 */
public class MoodleServiceImpl implements MoodleService.Iface {

	public MoodleServiceImpl() {
		System.out.println("Starting Moodle plugin server ...");
	}

	/**
	 * Starts a Tequila authentication round for Moodle.
	 *
	 * Hits Moodle's Tequila login page without following the redirect, then
	 * pulls the Tequila request key out of the redirect URL's query string.
	 * The Set-Cookie headers from Moodle are stashed on the token so
	 * {@link #getMoodleSession} can later complete the login with the same
	 * session cookie.
	 *
	 * @throws TException if the upstream server cannot be reached.
	 */
	@Override
	public TequilaToken getTequilaTokenForMoodle() throws TException {
		System.out.println("getTequilaToken");
		try {
			HttpURLConnection conn2 = (HttpURLConnection) new URL("http://moodle.epfl.ch/auth/tequila/index.php").openConnection();
			conn2.setInstanceFollowRedirects(false);
			conn2.getInputStream();
			URL url = new URL(conn2.getHeaderField("Location"));
			MultiMap<String> params = new MultiMap<String>();
			UrlEncoded.decodeTo(url.getQuery(), params, "UTF-8");
			TequilaToken teqToken = new TequilaToken(params.getString("requestkey"));
			Cookie cookie = new Cookie();
			cookie.setCookie(conn2.getHeaderFields().get("Set-Cookie"));
			teqToken.setLoginCookie(cookie.cookie());
			return teqToken;
		} catch (IOException e) {
			e.printStackTrace();
			throw new TException("Failed to getTequilaToken from upstream server");
		}
	}

	/**
	 * Completes the Tequila round started by {@link #getTequilaTokenForMoodle}.
	 *
	 * Re-requests the Tequila login page with the session cookie; a redirect
	 * to http://moodle.epfl.ch/my/ is the success signal, anything else means
	 * the Tequila authentication was not accepted.
	 *
	 * @throws TException on network failure or failed authentication.
	 */
	@Override
	public MoodleSession getMoodleSession(TequilaToken iTequilaToken) throws TException {
		System.out.println("getMoodleSession");
		try {
			HttpURLConnection conn2 = (HttpURLConnection) new URL("http://moodle.epfl.ch/auth/tequila/index.php").openConnection();
			conn2.setRequestProperty("Cookie", iTequilaToken.getLoginCookie());
			conn2.setInstanceFollowRedirects(false);
			conn2.getInputStream();
			if("http://moodle.epfl.ch/my/".equals(conn2.getHeaderField("Location")))
				return new MoodleSession(iTequilaToken.getLoginCookie());
			else
				throw new TException("Authentication failed");
		} catch (IOException e) {
			e.printStackTrace();
			throw new TException("Failed to getMoodleSession from upstream server");
		}
	}

	/**
	 * Scrapes the user's course list from the Moodle "my" page.
	 *
	 * Reply status codes used by this class: 200 = OK, 404 = upstream fetch
	 * failed, 407 = session cookie no longer logged in.
	 */
	@Override
	public CoursesListReply getCoursesList(MoodleRequest iRequest) throws TException {
		//iRequest.setICourseId(523);//tcpip
		//iRequest.setICourseId(225);//Course Demonstrator
		//iRequest.setICourseId(12101);//MI-023
		//iRequest.setICourseId(12271);//CF10100009
		//System.out.println(getCourseSections(iRequest));
		//System.out.println(getEventsList(iRequest));
		System.out.println("getCoursesList");
		String page = null;
		Cookie cookie = new Cookie();
		cookie.importFromString(iRequest.getISessionId().getMoodleCookie());
		try {
			page = getPageWithCookie("http://moodle.epfl.ch/my/", cookie);
		} catch (IOException e) {
			e.printStackTrace();
			return new CoursesListReply(404);
		}
		if(page == null || page.indexOf("login/index.php") != -1) {
			// a redirect to the login page means the cookie expired
			System.out.println("not logged in");
			return new CoursesListReply(407);
		}
		//page = getSubstringBetween(page, "block_course_overview", "block_course_list");
		LinkedList<MoodleCourse> tCourses = new LinkedList<MoodleCourse>();
		// each course is rendered in a "coursebox" div whose heading links to course/view.php?id=...
		for (String i : getAllSubstringsBetween(page, "coursebox", "</h3>")) {
			MoodleCourse mc = new MoodleCourse();
			//mc.setITitle(StringEscapeUtils.unescapeHtml4(getLastSubstringBetween(i, ">", "</a>")));
			String data = getSubstringBetween(i, "course/view.php", "</a>");
			mc.setITitle(getSubstringBetween(data, ">", "<")); // "<" will not be found
			mc.setIId(Integer.parseInt(getSubstringBetween(data, "id=", "\"")));
			tCourses.add(mc);
		}
		CoursesListReply cl = new CoursesListReply(200);
		cl.setICourses(tCourses);
		return cl;
	}

	/**
	 * Scrapes the user's calendar events. Assignment and user events are
	 * enriched with their detail pages via {@link #getAssignment} and
	 * {@link #getUserEvent}. Same status-code convention as getCoursesList.
	 */
	@Override
	public EventsListReply getEventsList(MoodleRequest iRequest) throws TException {
		// TODO this method was not checked against the new moodle
		System.out.println("getEventsList");
		String page = null;
		Cookie cookie = new Cookie();
		cookie.importFromString(iRequest.getISessionId().getMoodleCookie());
		try {
			page = getPageWithCookie("http://moodle.epfl.ch/calendar/view.php", cookie);
		} catch (IOException e) {
			e.printStackTrace();
			return new EventsListReply(404);
		}
		if(page == null || page.indexOf("login/index.php") != -1) {
			System.out.println("not logged in");
			return new EventsListReply(407);
		}
		LinkedList<MoodleEvent> tEvents = new LinkedList<MoodleEvent>();
		// events appear as HTML-escaped <div> blocks inside the calendar page
		for (String i : getAllSubstringsBetween(page, "&lt;div&gt;", "&lt;/div&gt;")) {
			MoodleEvent mev = parseMoodleEvent(i);
			if(mev.getIType() == MoodleEventType.MOODLE_EVENT_ASSIGNMENT) {
				mev.setIAssignment(getAssignment(mev, cookie)); // TODO check if null
			} else if(mev.getIType() == MoodleEventType.MOODLE_EVENT_USEREVENT) {
				mev.setIUserEvent(getUserEvent(mev, cookie)); // TODO check if null
			}
			tEvents.add(mev);
		}
		EventsListReply el = new EventsListReply(200);
		el.setIEvents(tEvents);
		return el;
	}

	/**
	 * Scrapes one course page into sections and downloadable resources.
	 *
	 * Direct pluginfile.php links are kept as-is; resource/folder pages are
	 * fetched and expanded into the files they contain. Status codes: 200 OK,
	 * 404 fetch failed, 405 missing course id in the request, 407 not logged in.
	 */
	@Override
	public SectionsListReply getCourseSections(MoodleRequest iRequest) throws TException {
		System.out.println("getCourseSections");
		String page = null;
		Cookie cookie = new Cookie();
		cookie.importFromString(iRequest.getISessionId().getMoodleCookie());
		if(!iRequest.isSetICourseId()) {
			return new SectionsListReply(405);
		}
		try {
			page = getPageWithCookie("http://moodle.epfl.ch/course/view.php?id=" + iRequest.getICourseId(), cookie);
		} catch (IOException e) {
			e.printStackTrace();
			return new SectionsListReply(404);
		}
		if(page == null || page.indexOf("login/index.php") != -1) {
			System.out.println("not logged in");
			return new SectionsListReply(407);
		}
		LinkedList<MoodleSection> msl = new LinkedList<MoodleSection>();
		// don't close the quote because some sections have more style classes such as hidden
		for (String i : getAllSubstringsBetween(page, "class=\"section main clearfix", "<!--class='section'-->")) {
			LinkedList<MoodleResource> mrl = new LinkedList<MoodleResource>();
			for (MoodleResource j : getLinks(i)) {
				if(j.getIUrl().indexOf("/pluginfile.php/") != -1) {
					// if it is a Moodle file, perfect
					mrl.add(j);
				} else if(j.getIUrl().indexOf("/mod/resource/view.php?") != -1 || j.getIUrl().indexOf("/mod/folder/view.php?") != -1) {
					// if it is a Moodle resource, get all files from it
					LinkedList<String> urls = getAllFilesFromMoodleResource(j.getIUrl(), cookie);
					for(String k : urls) {
						mrl.add(new MoodleResource(j.getIName(), k));
					}
				}
			}
			MoodleSection ms = new MoodleSection(mrl, stripHtmlTags("<" + i + ">"));
			// TODO add optional fields (start date and end date)
			ms.setICurrent(i.startsWith(" current"));
			msl.add(ms);
		}
		SectionsListReply sl = new SectionsListReply(200);
		sl.setISections(msl);
		return sl;
	}

	/**
	 * HELPER FUNCTIONS
	 */

	// Result of one HTTP GET: either the page body (HTTP 200) or the redirect
	// target from the Location header (any other status) — never both.
	private class HttpPageReply {
		private String page;
		private String location;
		public HttpPageReply(String page, String location) {
			this.page = page;
			this.location = location;
		}
		public String getPage() {
			return page;
		}
		public String getLocation() {
			return location;
		}
	}

	// Convenience wrapper: fetch a URL and return only the body (null on redirect).
	private String getPageWithCookie(String url, Cookie cookie) throws IOException {
		return getHttpReplyWithCookie(url, cookie).getPage();
	}

	// Performs a GET with the Moodle session cookie, without following redirects.
	private HttpPageReply getHttpReplyWithCookie(String url, Cookie cookie) throws IOException {
		HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
		conn.setRequestProperty("User-Agent", "Mozilla/5.0");
		conn.setInstanceFollowRedirects(false);
		conn.setRequestProperty("Cookie", cookie.cookie());
		if(conn.getResponseCode() != 200)
			return new HttpPageReply(null, conn.getHeaderField("Location"));
		return new HttpPageReply(IOUtils.toString(conn.getInputStream(), "UTF-8"), null);
	}

	// Returns the text between the FIRST occurrence of before and the first
	// occurrence of after following it; missing markers leave that side untrimmed.
	private String getSubstringBetween(String orig, String before, String after) {
		int b = orig.indexOf(before);
		if(b != -1) {
			orig = orig.substring(b + before.length());
		}
		int a = orig.indexOf(after);
		if(a != -1) {
			orig = orig.substring(0, a);
		}
		return orig;
	}

	// Same as above but anchored on the LAST occurrences (after first, then before).
	private String getLastSubstringBetween(String orig, String before, String after) {
		int a = orig.lastIndexOf(after);
		if(a != -1) {
			orig = orig.substring(0, a);
		}
		int b = orig.lastIndexOf(before);
		if(b != -1) {
			orig = orig.substring(b + before.length());
		}
		return orig;
	}

	// Collects every before...after span; for each found "after", the span is
	// anchored on the NEAREST preceding "before" (hence the lastIndexOf rescan).
	private LinkedList<String> getAllSubstringsBetween(String orig, String before, String after) {
		LinkedList<String> ssl = new LinkedList<String>();
		if(orig.length() == 0 || before.length() == 0 || after.length() == 0)
			return ssl;
		while(true) {
			int b = orig.indexOf(before);
			if(b == -1)
				return ssl;
			int a = orig.indexOf(after, b + before.length());
			if(a == -1)
				return ssl;
			b = orig.lastIndexOf(before, a - before.length());
			ssl.add(orig.substring(b + before.length(), a));
			orig = orig.substring(a + after.length());
		}
	}

	// Builds a MoodleEvent from one escaped calendar <div>: id from the
	// "event_NN" anchor, date from the view=day link's _d/_m/_y parameters,
	// type guessed from the icon image used in the markup.
	private MoodleEvent parseMoodleEvent(String html) {
		int id = Integer.parseInt(getSubstringBetween(html, "event_", "&quot;"));
		String title = getSubstringBetween(html, "&quot;&gt;", "&lt;");
		String datePart = getSubstringBetween(html, "view=day", "#");
		int d = Integer.parseInt(getSubstringBetween(datePart, "_d=", "&"));
		int m = Integer.parseInt(getSubstringBetween(datePart, "_m=", "&"));
		int y = Integer.parseInt(getSubstringBetween(datePart, "_y=", "&"));
		Calendar cal = Calendar.getInstance();
		cal.set(y, m - 1, d); // Calendar months are 0-based
		long date = cal.getTimeInMillis();
		//SimpleDateFormat sdf = new SimpleDateFormat("dd.mm.yy");
		//datePart = sdf.format(cal.getTimeInMillis()getTime());
		MoodleEventType type = MoodleEventType.MOODLE_EVENT_UNKNOWN;
		if(html.indexOf("c/user.gif") != -1) {
			type = MoodleEventType.MOODLE_EVENT_USEREVENT;
		} else if(html.indexOf("assignment/icon.gif") != -1) {
			type = MoodleEventType.MOODLE_EVENT_ASSIGNMENT;
		}
		return new MoodleEvent(id, title, date, type);
	}

	// Resolves an assignment event to its full MoodleAssignment by visiting
	// the day view (to find the assignment id) and then the assignment page
	// (description, posting/due dates, owning course). Returns null on any
	// fetch failure or expired session.
	private MoodleAssignment getAssignment(MoodleEvent event, Cookie cookie) {
		Calendar cal = Calendar.getInstance();
		cal.setTimeInMillis(event.getIDate());
		String url = "http://moodle.epfl.ch/calendar/view.php?view=day&cal_d=" + cal.get(Calendar.DATE) + "&cal_m=" + (cal.get(Calendar.MONTH) + 1) + "&cal_y=" + cal.get(Calendar.YEAR);
		String page = null;
		try {
			page = getPageWithCookie(url, cookie);
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
		if(page == null) {
			System.out.println("not logged in? now?");
			return null;
		}
		page = getSubstringBetween(page, "<a name=\"event_" + event.getIId() + "\"></a>", "</table>");
		page = getSubstringBetween(page, "assignment/view.php?id=", "\"");
		int id = Integer.parseInt(page);
		try {
			page = getPageWithCookie("http://moodle.epfl.ch/mod/assignment/view.php?id=" + id, cookie);
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
		if(page == null) {
			System.out.println("not logged in? now?");
			return null;
		}
		// description sits between the intro div's opening tag and the dates div
		String desc = getSubstringBetween(page, "id=\"intro\"", "id=\"dates\"");
		int b = desc.indexOf(">");
		desc = desc.substring(b + 1);
		int a = desc.lastIndexOf("</div>");
		desc = stripHtmlTags(desc.substring(0, a));
		// example markup: <div id="dates" class="box generalbox generalboxcontent boxaligncenter"><table><tr><td class="c0">Disponible dès le:</td> <td class="c1">vendredi 9 décembre 2011, 13:40</td></tr><tr><td class="c0">À rendre jusqu'au:</td> <td class="c1">samedi 24 décembre 2011, 00:00</td></tr></table></div>
		String dateHTML = getSubstringBetween(page, "id=\"dates\"", "</div>");
		LinkedList<String> byDate = getAllSubstringsBetween(dateHTML, "<td class=\"c1\">", "</td>");
		Long postingDate = null;
		Long dueDate = null;
		// two cells = posting date + due date; one cell = due date only
		if(byDate.size() > 1) {
			postingDate = parseDate(byDate.get(0));
			dueDate = parseDate(byDate.get(1));
		} else if(byDate.size() > 0) {
			dueDate = parseDate(byDate.get(0));
		}
		if(dueDate == null) {
			// if cannot parse then keep previous imprecise date
			dueDate = cal.getTimeInMillis();
		}
		String courseName = getSubstringBetween(page, "&amp;label=", "&amp;");
		try {
			courseName = URLDecoder.decode(courseName, "UTF-8");
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		}
		String courseIdStr = getSubstringBetween(page, "course/view.php?id=", "\"");
		int courseId = Integer.parseInt(courseIdStr);
		MoodleAssignment ma = new MoodleAssignment(id, event.getITitle(), desc, new MoodleCourse(courseId, courseName), dueDate);
		if(postingDate != null)
			ma.setIPostingDate(postingDate);
		// TODO add grade if existent
		return ma;
	}

	// Resolves a user event via its edit form: description from the textarea,
	// start (and optional end) date from the selected values of the form's
	// day/month/year/hour/minute dropdowns. Returns null on fetch failure.
	private MoodleUserEvent getUserEvent(MoodleEvent event, Cookie cookie) {
		String page = null;
		try {
			page = getPageWithCookie("http://moodle.epfl.ch/calendar/event.php?action=edit&id=" + event.getIId(), cookie);
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
		if(page == null) {
			System.out.println("not logged in? now?");
			return null;
		}
		String desc = getSubstringBetween(page, "id=\"edit-description\"", "id=\"edit-description\"");
		desc = stripHtmlTags(getSubstringBetween(desc, ">", "</textarea>"));
		int startday = getSelectedValue(getSubstringBetween(page, "name=\"startday\"", "</select>"));
		int startmon = getSelectedValue(getSubstringBetween(page, "name=\"startmon\"", "</select>"));
		int startyr = getSelectedValue(getSubstringBetween(page, "name=\"startyr\"", "</select>"));
		int starthr = getSelectedValue(getSubstringBetween(page, "name=\"starthr\"", "</select>"));
		int startmin = getSelectedValue(getSubstringBetween(page, "name=\"startmin\"", "</select>"));
		Calendar cal = Calendar.getInstance();
		cal.set(startyr, startmon - 1, startday, starthr, startmin);
		MoodleUserEvent userEvent = new MoodleUserEvent(event.getIId(), event.getITitle(), desc, cal.getTimeInMillis());
		// "no duration" radio unchecked means an end date is present in the form
		if(page.indexOf("id=\"duration_none\" checked") == -1) {
			int endday = getSelectedValue(getSubstringBetween(page, "name=\"endday\"", "</select>"));
			int endmon = getSelectedValue(getSubstringBetween(page, "name=\"endmon\"", "</select>"));
			int endyr = getSelectedValue(getSubstringBetween(page, "name=\"endyr\"", "</select>"));
			int endhr = getSelectedValue(getSubstringBetween(page, "name=\"endhr\"", "</select>"));
			int endmin = getSelectedValue(getSubstringBetween(page, "name=\"endmin\"", "</select>"));
			cal.set(endyr, endmon - 1, endday, endhr, endmin);
			userEvent.setIEndDate(cal.getTimeInMillis());
		}
		return userEvent;
	}

	// Extracts the numeric value of the <option ... selected> inside one
	// <select> fragment (value= attribute nearest before selected=).
	private int getSelectedValue(String html) {
		html = getLastSubstringBetween(html, "value=", "selected=");
		html = getSubstringBetween(html, "\"", "\"");
		return Integer.parseInt(html);
	}

	// Parses a Moodle-rendered date string, trying the English, French and
	// German display formats in turn; returns null when none match.
	private Long parseDate(String date) {
		// Try the 3 possible languages of Moodle.
		Calendar cal = Calendar.getInstance();
		try {
			// Monday, 9 January 2012, 06:05 PM
			SimpleDateFormat sdf = new SimpleDateFormat("EEEE, d MMMM yyyy, hh:mm a", Locale.ENGLISH);
			cal.setTime(sdf.parse(date));
			return cal.getTimeInMillis();
		} catch (ParseException e1) {
			//e1.printStackTrace();
		}
		try {
			// lundi 9 janvier 2012, 18:05
			SimpleDateFormat sdf = new SimpleDateFormat("EEEE d MMMM yyyy, HH:mm", Locale.FRENCH);
			cal.setTime(sdf.parse(date));
			return cal.getTimeInMillis();
		} catch (ParseException e1) {
			//e1.printStackTrace();
		}
		try {
			// Montag, 9. Januar 2012, 18:05
			SimpleDateFormat sdf = new SimpleDateFormat("EEEE, d. MMMM yyyy, HH:mm", Locale.GERMAN);
			cal.setTime(sdf.parse(date));
			return cal.getTimeInMillis();
		} catch (ParseException e1) {
			//e1.printStackTrace();
		}
		System.err.println("parseDate: failed to interpret date. what language you using? arabic?");
		return null;
	}

	// Expands a resource/folder page into the pluginfile.php URLs it exposes.
	// A 200 answer is scanned for file links; a redirect straight to a file is
	// taken as the single result. Best-effort: returns an empty list on error.
	private LinkedList<String> getAllFilesFromMoodleResource(String resourceUrl, Cookie cookie) {
		LinkedList<String> urls = new LinkedList<String>();
		HttpPageReply httpReply = null;
		try {
			httpReply = getHttpReplyWithCookie(resourceUrl, cookie);
		} catch (IOException e) {
			e.printStackTrace();
			return urls; // failed, do not crash
		}
		if(httpReply.getPage() != null) {
			for (MoodleResource j : getLinks(httpReply.getPage())) {
				if(j.getIUrl().indexOf("/pluginfile.php/") != -1)
					if(!urls.contains(j.getIUrl()))
						urls.add(stripOffQueryString(j.getIUrl()));
			}
		} else {
			if(httpReply.getLocation().indexOf("/pluginfile.php/") != -1)
				urls.add(stripOffQueryString(httpReply.getLocation()));
		}
		return urls;
	}

	// Drops everything from the first '?' onwards (forcedownload flags etc.).
	private String stripOffQueryString(String url) {
		if(url.indexOf("?") == -1)
			return url;
		return url.substring(0, url.indexOf("?"));
	}

	// Extracts all <a href="...">text</a> pairs from an HTML fragment.
	private LinkedList<MoodleResource> getLinks(String html) {
		LinkedList<MoodleResource> mrl = new LinkedList<MoodleResource>();
		for (String j : getAllSubstringsBetween(html, "href=\"", "</a>")) {
			String url = j.substring(0, j.indexOf("\"")); // target
			String name = stripHtmlTags(j.substring(j.indexOf(">") + 1)); // innerHTML
			mrl.add(new MoodleResource(name, url));
		}
		return mrl;
	}

	// Converts an HTML fragment to plain text: removes hidden elements,
	// turns breaks/headings into newlines, strips remaining tags, unescapes
	// entities, then normalizes whitespace.
	private String stripHtmlTags(String html) {
		// or keep it client-side
		// android.text.Html.fromHtml(instruction).toString()
		// should first remove invisible elements
		html = html.replaceAll("class=\"left side[^<]+<", "");
		html = html.replaceAll("class=\"weekdates[^<]+<", "");
		html = html.replaceAll("class=\"accesshide[^<]+<", "");
		html = html.replaceAll("<br />", "\n");
		html = html.replaceAll("<h2>", "\n");
		html = html.replaceAll("</h2>", "\n");
		html = html.replaceAll("<[^>]+>", "");
		html = StringEscapeUtils.unescapeHtml4(html);
		html = html.replaceAll("[\\xA0]+", " "); // replace non-breaking spaces (code 160) with normal spaces (code 32)
		html = html.replaceAll("[\\t\\r\\v\\f]+", ""); // remove some weird characters
		html = html.replaceAll("[\\n][ ]+", "\n"); // remove spaces at the beginning of a line
		html = html.replaceAll("[ ]+[\\n]", "\n"); // remove spaces at the end of a line
		html = html.replaceAll("[ ]+", " "); // remove consecutive spaces
		html = html.replaceAll("[\\n]+", "\n"); // remove consecutive new-lines
		html = html.replaceAll("^[\\n]+", ""); // remove new-line characters at the beginning
		html = html.replaceAll("[\\n]+$", ""); // remove new-line characters at the end
		return html.trim();
	}
}
plugin/moodle/plugin.moodle.server/src/org/pocketcampus/plugin/moodle/server/MoodleServiceImpl.java
package org.pocketcampus.plugin.moodle.server; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLDecoder; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.LinkedList; import java.util.Locale; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringEscapeUtils; import org.apache.thrift.TException; import org.eclipse.jetty.util.MultiMap; import org.eclipse.jetty.util.UrlEncoded; import org.pocketcampus.plugin.authentication.shared.TequilaToken; import org.pocketcampus.plugin.authentication.shared.utils.Cookie; import org.pocketcampus.plugin.moodle.shared.CoursesListReply; import org.pocketcampus.plugin.moodle.shared.EventsListReply; import org.pocketcampus.plugin.moodle.shared.MoodleAssignment; import org.pocketcampus.plugin.moodle.shared.MoodleCourse; import org.pocketcampus.plugin.moodle.shared.MoodleEvent; import org.pocketcampus.plugin.moodle.shared.MoodleEventType; import org.pocketcampus.plugin.moodle.shared.MoodleRequest; import org.pocketcampus.plugin.moodle.shared.MoodleResource; import org.pocketcampus.plugin.moodle.shared.MoodleSection; import org.pocketcampus.plugin.moodle.shared.MoodleService; import org.pocketcampus.plugin.moodle.shared.MoodleSession; import org.pocketcampus.plugin.moodle.shared.MoodleUserEvent; import org.pocketcampus.plugin.moodle.shared.SectionsListReply; /** * MoodleServiceImpl * * The implementation of the server side of the Moodle Plugin. * * It fetches the user's Moodle data from the Moodle servers. 
* * @author Amer <[email protected]> * */ public class MoodleServiceImpl implements MoodleService.Iface { public MoodleServiceImpl() { System.out.println("Starting Moodle plugin server ..."); } @Override public TequilaToken getTequilaTokenForMoodle() throws TException { System.out.println("getTequilaToken"); try { HttpURLConnection conn2 = (HttpURLConnection) new URL("http://moodle.epfl.ch/auth/tequila/index.php").openConnection(); conn2.setInstanceFollowRedirects(false); conn2.getInputStream(); URL url = new URL(conn2.getHeaderField("Location")); MultiMap<String> params = new MultiMap<String>(); UrlEncoded.decodeTo(url.getQuery(), params, "UTF-8"); TequilaToken teqToken = new TequilaToken(params.getString("requestkey")); Cookie cookie = new Cookie(); cookie.setCookie(conn2.getHeaderFields().get("Set-Cookie")); teqToken.setLoginCookie(cookie.cookie()); return teqToken; } catch (IOException e) { e.printStackTrace(); throw new TException("Failed to getTequilaToken from upstream server"); } } @Override public MoodleSession getMoodleSession(TequilaToken iTequilaToken) throws TException { System.out.println("getMoodleSession"); try { HttpURLConnection conn2 = (HttpURLConnection) new URL("http://moodle.epfl.ch/auth/tequila/index.php").openConnection(); conn2.setRequestProperty("Cookie", iTequilaToken.getLoginCookie()); conn2.setInstanceFollowRedirects(false); conn2.getInputStream(); if("http://moodle.epfl.ch/my/".equals(conn2.getHeaderField("Location"))) return new MoodleSession(iTequilaToken.getLoginCookie()); else throw new TException("Authentication failed"); } catch (IOException e) { e.printStackTrace(); throw new TException("Failed to getMoodleSession from upstream server"); } } @Override public CoursesListReply getCoursesList(MoodleRequest iRequest) throws TException { //iRequest.setICourseId(523);//tcpip //iRequest.setICourseId(225);//Course Demonstrator //iRequest.setICourseId(12101);//MI-023 //iRequest.setICourseId(12271);//CF10100009 
//System.out.println(getCourseSections(iRequest)); //System.out.println(getEventsList(iRequest)); System.out.println("getCoursesList"); String page = null; Cookie cookie = new Cookie(); cookie.importFromString(iRequest.getISessionId().getMoodleCookie()); try { page = getPageWithCookie("http://moodle.epfl.ch/my/", cookie); } catch (IOException e) { e.printStackTrace(); return new CoursesListReply(404); } if(page == null || page.indexOf("login/index.php") != -1) { System.out.println("not logged in"); return new CoursesListReply(407); } //page = getSubstringBetween(page, "block_course_overview", "block_course_list"); LinkedList<MoodleCourse> tCourses = new LinkedList<MoodleCourse>(); for (String i : getAllSubstringsBetween(page, "coursebox", "</h3>")) { MoodleCourse mc = new MoodleCourse(); //mc.setITitle(StringEscapeUtils.unescapeHtml4(getLastSubstringBetween(i, ">", "</a>"))); String data = getSubstringBetween(i, "course/view.php", "</a>"); mc.setITitle(getSubstringBetween(data, ">", "<")); // "<" will not be found mc.setIId(Integer.parseInt(getSubstringBetween(data, "id=", "\""))); tCourses.add(mc); } CoursesListReply cl = new CoursesListReply(200); cl.setICourses(tCourses); return cl; } @Override public EventsListReply getEventsList(MoodleRequest iRequest) throws TException { // TODO this method was not checked against the new moodle System.out.println("getEventsList"); String page = null; Cookie cookie = new Cookie(); cookie.importFromString(iRequest.getISessionId().getMoodleCookie()); try { page = getPageWithCookie("http://moodle.epfl.ch/calendar/view.php", cookie); } catch (IOException e) { e.printStackTrace(); return new EventsListReply(404); } if(page == null || page.indexOf("login/index.php") != -1) { System.out.println("not logged in"); return new EventsListReply(407); } LinkedList<MoodleEvent> tEvents = new LinkedList<MoodleEvent>(); for (String i : getAllSubstringsBetween(page, "&lt;div&gt;", "&lt;/div&gt;")) { MoodleEvent mev = parseMoodleEvent(i); 
if(mev.getIType() == MoodleEventType.MOODLE_EVENT_ASSIGNMENT) { mev.setIAssignment(getAssignment(mev, cookie)); // TODO check if null } else if(mev.getIType() == MoodleEventType.MOODLE_EVENT_USEREVENT) { mev.setIUserEvent(getUserEvent(mev, cookie)); // TODO check if null } tEvents.add(mev); } EventsListReply el = new EventsListReply(200); el.setIEvents(tEvents); return el; } @Override public SectionsListReply getCourseSections(MoodleRequest iRequest) throws TException { System.out.println("getCourseSections"); String page = null; Cookie cookie = new Cookie(); cookie.importFromString(iRequest.getISessionId().getMoodleCookie()); if(!iRequest.isSetICourseId()) { return new SectionsListReply(405); } try { page = getPageWithCookie("http://moodle.epfl.ch/course/view.php?id=" + iRequest.getICourseId(), cookie); } catch (IOException e) { e.printStackTrace(); return new SectionsListReply(404); } if(page == null || page.indexOf("login/index.php") != -1) { System.out.println("not logged in"); return new SectionsListReply(407); } LinkedList<MoodleSection> msl = new LinkedList<MoodleSection>(); // don't close the quote because some sections have more style classes such as hidden for (String i : getAllSubstringsBetween(page, "class=\"section main", "<!--class='section'-->")) { LinkedList<MoodleResource> mrl = new LinkedList<MoodleResource>(); for (MoodleResource j : getLinks(i)) { if(j.getIUrl().indexOf("/pluginfile.php/") != -1) { // if it is a Moodle file, perfect mrl.add(j); } else if(j.getIUrl().indexOf("/mod/resource/view.php?") != -1 || j.getIUrl().indexOf("/mod/folder/view.php?") != -1) { // if it is a Moodle resource, get all files from it LinkedList<String> urls = getAllFilesFromMoodleResource(j.getIUrl(), cookie); for(String k : urls) { mrl.add(new MoodleResource(j.getIName(), k)); } } } MoodleSection ms = new MoodleSection(mrl, stripHtmlTags("<" + i + ">")); // TODO add optional fields (start date and end date) ms.setICurrent(i.startsWith(" current")); msl.add(ms); } 
SectionsListReply sl = new SectionsListReply(200); sl.setISections(msl); return sl; } /** * HELPER FUNCTIONS */ private class HttpPageReply { private String page; private String location; public HttpPageReply(String page, String location) { this.page = page; this.location = location; } public String getPage() { return page; } public String getLocation() { return location; } } private String getPageWithCookie(String url, Cookie cookie) throws IOException { return getHttpReplyWithCookie(url, cookie).getPage(); } private HttpPageReply getHttpReplyWithCookie(String url, Cookie cookie) throws IOException { HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection(); conn.setRequestProperty("User-Agent", "Mozilla/5.0"); conn.setInstanceFollowRedirects(false); conn.setRequestProperty("Cookie", cookie.cookie()); if(conn.getResponseCode() != 200) return new HttpPageReply(null, conn.getHeaderField("Location")); return new HttpPageReply(IOUtils.toString(conn.getInputStream(), "UTF-8"), null); } private String getSubstringBetween(String orig, String before, String after) { int b = orig.indexOf(before); if(b != -1) { orig = orig.substring(b + before.length()); } int a = orig.indexOf(after); if(a != -1) { orig = orig.substring(0, a); } return orig; } private String getLastSubstringBetween(String orig, String before, String after) { int a = orig.lastIndexOf(after); if(a != -1) { orig = orig.substring(0, a); } int b = orig.lastIndexOf(before); if(b != -1) { orig = orig.substring(b + before.length()); } return orig; } private LinkedList<String> getAllSubstringsBetween(String orig, String before, String after) { LinkedList<String> ssl = new LinkedList<String>(); if(orig.length() == 0 || before.length() == 0 || after.length() == 0) return ssl; while(true) { int b = orig.indexOf(before); if(b == -1) return ssl; int a = orig.indexOf(after, b + before.length()); if(a == -1) return ssl; b = orig.lastIndexOf(before, a - before.length()); ssl.add(orig.substring(b + 
before.length(), a)); orig = orig.substring(a + after.length()); } } private MoodleEvent parseMoodleEvent(String html) { int id = Integer.parseInt(getSubstringBetween(html, "event_", "&quot;")); String title = getSubstringBetween(html, "&quot;&gt;", "&lt;"); String datePart = getSubstringBetween(html, "view=day", "#"); int d = Integer.parseInt(getSubstringBetween(datePart, "_d=", "&")); int m = Integer.parseInt(getSubstringBetween(datePart, "_m=", "&")); int y = Integer.parseInt(getSubstringBetween(datePart, "_y=", "&")); Calendar cal = Calendar.getInstance(); cal.set(y, m - 1, d); long date = cal.getTimeInMillis(); //SimpleDateFormat sdf = new SimpleDateFormat("dd.mm.yy"); //datePart = sdf.format(cal.getTimeInMillis()getTime()); MoodleEventType type = MoodleEventType.MOODLE_EVENT_UNKNOWN; if(html.indexOf("c/user.gif") != -1) { type = MoodleEventType.MOODLE_EVENT_USEREVENT; } else if(html.indexOf("assignment/icon.gif") != -1) { type = MoodleEventType.MOODLE_EVENT_ASSIGNMENT; } return new MoodleEvent(id, title, date, type); } private MoodleAssignment getAssignment(MoodleEvent event, Cookie cookie) { Calendar cal = Calendar.getInstance(); cal.setTimeInMillis(event.getIDate()); String url = "http://moodle.epfl.ch/calendar/view.php?view=day&cal_d=" + cal.get(Calendar.DATE) + "&cal_m=" + (cal.get(Calendar.MONTH) + 1) + "&cal_y=" + cal.get(Calendar.YEAR); String page = null; try { page = getPageWithCookie(url, cookie); } catch (IOException e) { e.printStackTrace(); return null; } if(page == null) { System.out.println("not logged in? now?"); return null; } page = getSubstringBetween(page, "<a name=\"event_" + event.getIId() + "\"></a>", "</table>"); page = getSubstringBetween(page, "assignment/view.php?id=", "\""); int id = Integer.parseInt(page); try { page = getPageWithCookie("http://moodle.epfl.ch/mod/assignment/view.php?id=" + id, cookie); } catch (IOException e) { e.printStackTrace(); return null; } if(page == null) { System.out.println("not logged in? 
now?"); return null; } String desc = getSubstringBetween(page, "id=\"intro\"", "id=\"dates\""); int b = desc.indexOf(">"); desc = desc.substring(b + 1); int a = desc.lastIndexOf("</div>"); desc = stripHtmlTags(desc.substring(0, a)); // <div id="dates" class="box generalbox generalboxcontent boxaligncenter"><table><tr><td class="c0">Disponible dès le:</td> <td class="c1">vendredi 9 décembre 2011, 13:40</td></tr><tr><td class="c0">À rendre jusqu'au:</td> <td class="c1">samedi 24 décembre 2011, 00:00</td></tr></table></div> String dateHTML = getSubstringBetween(page, "id=\"dates\"", "</div>"); LinkedList<String> byDate = getAllSubstringsBetween(dateHTML, "<td class=\"c1\">", "</td>"); Long postingDate = null; Long dueDate = null; if(byDate.size() > 1) { postingDate = parseDate(byDate.get(0)); dueDate = parseDate(byDate.get(1)); } else if(byDate.size() > 0) { dueDate = parseDate(byDate.get(0)); } if(dueDate == null) { // if cannot parse then keep previous imprecise date dueDate = cal.getTimeInMillis(); } String courseName = getSubstringBetween(page, "&amp;label=", "&amp;"); try { courseName = URLDecoder.decode(courseName, "UTF-8"); } catch (UnsupportedEncodingException e) { e.printStackTrace(); } String courseIdStr = getSubstringBetween(page, "course/view.php?id=", "\""); int courseId = Integer.parseInt(courseIdStr); MoodleAssignment ma = new MoodleAssignment(id, event.getITitle(), desc, new MoodleCourse(courseId, courseName), dueDate); if(postingDate != null) ma.setIPostingDate(postingDate); // TODO add grade if existent return ma; } private MoodleUserEvent getUserEvent(MoodleEvent event, Cookie cookie) { String page = null; try { page = getPageWithCookie("http://moodle.epfl.ch/calendar/event.php?action=edit&id=" + event.getIId(), cookie); } catch (IOException e) { e.printStackTrace(); return null; } if(page == null) { System.out.println("not logged in? 
now?"); return null; } String desc = getSubstringBetween(page, "id=\"edit-description\"", "id=\"edit-description\""); desc = stripHtmlTags(getSubstringBetween(desc, ">", "</textarea>")); int startday = getSelectedValue(getSubstringBetween(page, "name=\"startday\"", "</select>")); int startmon = getSelectedValue(getSubstringBetween(page, "name=\"startmon\"", "</select>")); int startyr = getSelectedValue(getSubstringBetween(page, "name=\"startyr\"", "</select>")); int starthr = getSelectedValue(getSubstringBetween(page, "name=\"starthr\"", "</select>")); int startmin = getSelectedValue(getSubstringBetween(page, "name=\"startmin\"", "</select>")); Calendar cal = Calendar.getInstance(); cal.set(startyr, startmon - 1, startday, starthr, startmin); MoodleUserEvent userEvent = new MoodleUserEvent(event.getIId(), event.getITitle(), desc, cal.getTimeInMillis()); if(page.indexOf("id=\"duration_none\" checked") == -1) { int endday = getSelectedValue(getSubstringBetween(page, "name=\"endday\"", "</select>")); int endmon = getSelectedValue(getSubstringBetween(page, "name=\"endmon\"", "</select>")); int endyr = getSelectedValue(getSubstringBetween(page, "name=\"endyr\"", "</select>")); int endhr = getSelectedValue(getSubstringBetween(page, "name=\"endhr\"", "</select>")); int endmin = getSelectedValue(getSubstringBetween(page, "name=\"endmin\"", "</select>")); cal.set(endyr, endmon - 1, endday, endhr, endmin); userEvent.setIEndDate(cal.getTimeInMillis()); } return userEvent; } private int getSelectedValue(String html) { html = getLastSubstringBetween(html, "value=", "selected="); html = getSubstringBetween(html, "\"", "\""); return Integer.parseInt(html); } private Long parseDate(String date) { // Try the 3 possible languages of Moodle. 
Calendar cal = Calendar.getInstance(); try { // Monday, 9 January 2012, 06:05 PM SimpleDateFormat sdf = new SimpleDateFormat("EEEE, d MMMM yyyy, hh:mm a", Locale.ENGLISH); cal.setTime(sdf.parse(date)); return cal.getTimeInMillis(); } catch (ParseException e1) { //e1.printStackTrace(); } try { // lundi 9 janvier 2012, 18:05 SimpleDateFormat sdf = new SimpleDateFormat("EEEE d MMMM yyyy, HH:mm", Locale.FRENCH); cal.setTime(sdf.parse(date)); return cal.getTimeInMillis(); } catch (ParseException e1) { //e1.printStackTrace(); } try { // Montag, 9. Januar 2012, 18:05 SimpleDateFormat sdf = new SimpleDateFormat("EEEE, d. MMMM yyyy, HH:mm", Locale.GERMAN); cal.setTime(sdf.parse(date)); return cal.getTimeInMillis(); } catch (ParseException e1) { //e1.printStackTrace(); } System.err.println("parseDate: failed to interpret date. what language you using? arabic?"); return null; } private LinkedList<String> getAllFilesFromMoodleResource(String resourceUrl, Cookie cookie) { LinkedList<String> urls = new LinkedList<String>(); HttpPageReply httpReply = null; try { httpReply = getHttpReplyWithCookie(resourceUrl, cookie); } catch (IOException e) { e.printStackTrace(); return urls; // failed, do not crash } if(httpReply.getPage() != null) { for (MoodleResource j : getLinks(httpReply.getPage())) { if(j.getIUrl().indexOf("/pluginfile.php/") != -1) if(!urls.contains(j.getIUrl())) urls.add(stripOffQueryString(j.getIUrl())); } } else { if(httpReply.getLocation().indexOf("/pluginfile.php/") != -1) urls.add(stripOffQueryString(httpReply.getLocation())); } return urls; } private String stripOffQueryString(String url) { if(url.indexOf("?") == -1) return url; return url.substring(0, url.indexOf("?")); } private LinkedList<MoodleResource> getLinks(String html) { LinkedList<MoodleResource> mrl = new LinkedList<MoodleResource>(); for (String j : getAllSubstringsBetween(html, "href=\"", "</a>")) { String url = j.substring(0, j.indexOf("\"")); // target String name = 
stripHtmlTags(j.substring(j.indexOf(">") + 1)); // innerHTML mrl.add(new MoodleResource(name, url)); } return mrl; } private String stripHtmlTags(String html) { // or keep it client-side // android.text.Html.fromHtml(instruction).toString() // should first remove invisible elements html = html.replaceAll("class=\"left side[^<]+<", ""); html = html.replaceAll("class=\"weekdates[^<]+<", ""); html = html.replaceAll("class=\"accesshide[^<]+<", ""); html = html.replaceAll("<br />", "\n"); html = html.replaceAll("<h2>", "\n"); html = html.replaceAll("</h2>", "\n"); html = html.replaceAll("<[^>]+>", ""); html = StringEscapeUtils.unescapeHtml4(html); html = html.replaceAll("[\\xA0]+", " "); // replace non-breaking spaces (code 160) with normal spaces (code 32) html = html.replaceAll("[\\t\\r\\v\\f]+", ""); // remove some weird characters html = html.replaceAll("[\\n][ ]+", "\n"); // remove spaces at the beginning of a line html = html.replaceAll("[ ]+[\\n]", "\n"); // remove spaces at the end of a line html = html.replaceAll("[ ]+", " "); // remove consecutive spaces html = html.replaceAll("[\\n]+", "\n"); // remove consecutive new-lines html = html.replaceAll("^[\\n]+", ""); // remove new-line characters at the beginning html = html.replaceAll("[\\n]+$", ""); // remove new-line characters at the end return html.trim(); } }
[moodle] [server] fix current week
plugin/moodle/plugin.moodle.server/src/org/pocketcampus/plugin/moodle/server/MoodleServiceImpl.java
[moodle] [server] fix current week
Java
mit
c1ffe05dc24579368e86742d17506e2a46cd6278
0
eldering/autoanalyst,eldering/autoanalyst,eldering/autoanalyst,eldering/autoanalyst,eldering/autoanalyst,eldering/autoanalyst,eldering/autoanalyst
package teamscore; import java.util.ArrayList; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.apache.log4j.Logger; import web.StaticWebDocument; import web.WebPublisher; import model.Contest; import model.FakeScore; import model.OutputHook; import model.Problem; import model.Score; import model.ScoreTableComparer; import model.ScoreTableEntry; import model.Standings; import model.StandingsPublisher; import model.Team; public class ExtendedScoreDump implements OutputHook, StandingsPublisher { static final Logger log = Logger.getLogger(ExtendedScoreDump.class); final Contest contest; final WebPublisher publisherTarget; final static ScoreTableComparer comparator = new ScoreTableComparer(); class ScoreDumper { Standings standings; int minutesFromStart; ArrayList<Score> scoresAbove = new ArrayList<Score>(); public ScoreDumper(Standings standings, int minutesFromStart) { this.standings = standings; this.minutesFromStart = minutesFromStart; } public JSONObject DumpScore(Score score) { Team team = score.getTeam(); JSONArray problems = new JSONArray(); int place = scoresAbove.size(); for (Problem p : contest.getProblems()) { boolean isSolved = score.isSolved(p); JSONObject problemInfo = new JSONObject() .element("id", p.getLetter()) .element("solved", isSolved) .element("attempts", score.submissionCount(p)) .element("time", score.scoreContribution(p)); int lastSubmissionTime = score.lastSubmissionTime(p); if (lastSubmissionTime != 0) { problemInfo = problemInfo.element("lastUpd", lastSubmissionTime); } if (!isSolved) { ScoreTableEntry fake = FakeScore.PretendProblemSolved(score, p, minutesFromStart); JSONObject potential = new JSONObject(); place = calcFictiousRank(scoresAbove, fake, place, potential); problemInfo = problemInfo.element("potential", potential); } String language = team.languageFor(p); if (language != null) { problemInfo = problemInfo.element("lang", language); } problems.add(problemInfo); } JSONObject target = new JSONObject() 
.element("rank", standings.rankOf(team)) .element("team", new JSONObject() .element("id", team.getTeamNumber()) .element("tag", team.toString()) .element("name", team.getName())) .element("nSolved", score.getNumberOfSolvedProblems()) .element("totalTime", score.getTimeIncludingPenalty()) .element("mainLang", team.getMainLanguage()) .element("problems", problems); return target; } private JSONArray getProblems(Contest contest) { JSONArray result = new JSONArray(); for (Problem p : contest.getProblems()) { JSONObject problemInfo = new JSONObject() .element("tag", p.getLetter()) .element("name", p.getName()); result.add(problemInfo); } return result; } private JSONObject getContestInfo(Contest contest) { return new JSONObject() .element("length", contest.getLengthInMinutes()) .element("problems", getProblems(contest)) .element("submissions", contest.getSubmissionCount()) .element("time", contest.getMinutesFromStart()); } public String execute() { scoresAbove.clear(); JSONArray resultArray = new JSONArray(); ArrayList<JSONObject> jsonScores = new ArrayList<JSONObject>(); for (Score score : standings) { scoresAbove.add(score); jsonScores.add(DumpScore(score)); } resultArray.addAll(jsonScores); JSONObject contestInfo = getContestInfo(standings.getContest()); JSONObject contestStatus = new JSONObject() .element("scoreBoard", resultArray) .element("contestInfo", contestInfo); return contestStatus.toString(); } private int calcFictiousRank(ArrayList<Score> scoresAbove, ScoreTableEntry fake, int startFrom, JSONObject result) { int fakeIndex = startFrom; while (fakeIndex > 0 && comparator.compare(fake, scoresAbove.get(fakeIndex - 1)) <= 0) { fakeIndex--; } while (fakeIndex < scoresAbove.size() && comparator.compare(fake, scoresAbove.get(fakeIndex)) > 0) { fakeIndex++; } int margin = -1; result.element("rank", fakeIndex + 1); if (fakeIndex < scoresAbove.size()) { ScoreTableEntry next = scoresAbove.get(fakeIndex); if (next.getNumberOfSolvedProblems() == 
fake.getNumberOfSolvedProblems()) { margin = next.getTimeIncludingPenalty() - fake.getTimeIncludingPenalty(); result.element("before", margin); } } return fakeIndex; } } public ExtendedScoreDump(Contest contest, WebPublisher target) { this.contest = contest; this.publisherTarget = target; } public void publishStandings() { int minutesFromStart = contest.getMinutesFromStart(); int submissionsAtTime = contest.getSubmissionsAtTime(minutesFromStart); ScoreDumper scoreDumper = new ScoreDumper(contest.getStandings(submissionsAtTime), minutesFromStart); String scoreTable = scoreDumper.execute(); StaticWebDocument scoreDoc = new StaticWebDocument("application/json", scoreTable); log.debug("publishing Standings... " + minutesFromStart); publisherTarget.publish("/Standings", scoreDoc); } @Override public void execute(int minutesFromStart) { log.debug("preparing Standings... " + minutesFromStart); int submissionsAtTime = contest.getSubmissionsAtTime(minutesFromStart); ScoreDumper scoreDumper = new ScoreDumper(contest.getStandings(submissionsAtTime), minutesFromStart); log.debug("dumping Standings... " + minutesFromStart); String scoreTable = scoreDumper.execute(); StaticWebDocument scoreDoc = new StaticWebDocument("application/json", scoreTable); log.debug("publishing Standings... " + minutesFromStart); publisherTarget.publish("/Standings", scoreDoc); publisherTarget.publish(String.format("/Standings.%03d", minutesFromStart), scoreDoc); log.debug("done publishing Standings... " + minutesFromStart); } }
katalyze/src/teamscore/ExtendedScoreDump.java
package teamscore; import java.util.ArrayList; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.apache.log4j.Logger; import web.StaticWebDocument; import web.WebPublisher; import model.Contest; import model.FakeScore; import model.OutputHook; import model.Problem; import model.Score; import model.ScoreTableComparer; import model.ScoreTableEntry; import model.Standings; import model.StandingsPublisher; import model.Team; public class ExtendedScoreDump implements OutputHook, StandingsPublisher { static final Logger log = Logger.getLogger(ExtendedScoreDump.class); final Contest contest; final WebPublisher publisherTarget; final static ScoreTableComparer comparator = new ScoreTableComparer(); class ScoreDumper { Standings standings; int minutesFromStart; ArrayList<Score> scoresAbove = new ArrayList<Score>(); public ScoreDumper(Standings standings, int minutesFromStart) { this.standings = standings; this.minutesFromStart = minutesFromStart; } public JSONObject DumpScore(Score score) { Team team = score.getTeam(); JSONArray problems = new JSONArray(); int place = scoresAbove.size(); for (Problem p : contest.getProblems()) { boolean isSolved = score.isSolved(p); JSONObject problemInfo = new JSONObject() .element("id", p.getLetter()) .element("solved", isSolved) .element("attempts", score.submissionCount(p)) .element("time", score.scoreContribution(p)); int lastSubmissionTime = score.lastSubmissionTime(p); if (lastSubmissionTime != 0) { problemInfo = problemInfo.element("lastUpd", lastSubmissionTime); } if (!isSolved) { ScoreTableEntry fake = FakeScore.PretendProblemSolved(score, p, minutesFromStart); JSONObject potential = new JSONObject(); place = calcFictiousRank(scoresAbove, fake, place, potential); problemInfo = problemInfo.element("potential", potential); } String language = team.languageFor(p); if (language != null) { problemInfo = problemInfo.element("lang", language); } problems.add(problemInfo); } JSONObject target = new JSONObject() 
.element("rank", standings.rankOf(team)) .element("team", new JSONObject() .element("id", team.getTeamNumber()) .element("tag", team.toString()) .element("name", team.getName())) .element("nSolved", score.getNumberOfSolvedProblems()) .element("totalTime", score.getTimeIncludingPenalty()) .element("mainLang", team.getMainLanguage()) .element("problems", problems); return target; } private JSONArray getProblems(Contest contest) { JSONArray result = new JSONArray(); for (Problem p : contest.getProblems()) { JSONObject problemInfo = new JSONObject() .element("tag", p.getLetter()) .element("name", p.getName()); result.add(problemInfo); } return result; } private JSONObject getContestInfo(Contest contest) { return new JSONObject() .element("length", contest.getLengthInMinutes()) .element("problems", getProblems(contest)) .element("submissions", contest.getSubmissionCount()) .element("time", contest.getMinutesFromStart()); } public String execute() { scoresAbove.clear(); JSONArray resultArray = new JSONArray(); ArrayList<JSONObject> jsonScores = new ArrayList<JSONObject>(); for (Score score : standings) { scoresAbove.add(score); jsonScores.add(DumpScore(score)); } resultArray.addAll(jsonScores); JSONObject contestInfo = getContestInfo(standings.getContest()); JSONObject contestStatus = new JSONObject() .element("scoreBoard", resultArray) .element("contestInfo", contestInfo); return contestStatus.toString(); } private int calcFictiousRank(ArrayList<Score> scoresAbove, ScoreTableEntry fake, int startFrom, JSONObject result) { int fakeIndex = startFrom; while (fakeIndex > 0 && comparator.compare(fake, scoresAbove.get(fakeIndex - 1)) <= 0) { fakeIndex--; } while (fakeIndex < scoresAbove.size() && comparator.compare(fake, scoresAbove.get(fakeIndex)) > 0) { fakeIndex++; } int margin = -1; result.element("rank", fakeIndex + 1); if (fakeIndex < scoresAbove.size()) { ScoreTableEntry next = scoresAbove.get(fakeIndex); if (next.getNumberOfSolvedProblems() == 
fake.getNumberOfSolvedProblems()) { margin = next.getTimeIncludingPenalty() - fake.getTimeIncludingPenalty(); result.element("before", margin); } } return fakeIndex; } } public ExtendedScoreDump(Contest contest, WebPublisher target) { this.contest = contest; this.publisherTarget = target; } public void publishStandings() { int minutesFromStart = contest.getMinutesFromStart(); int submissionsAtTime = contest.getSubmissionsAtTime(minutesFromStart); ScoreDumper scoreDumper = new ScoreDumper(contest.getStandings(submissionsAtTime), minutesFromStart); String scoreTable = scoreDumper.execute(); StaticWebDocument scoreDoc = new StaticWebDocument("application/json", scoreTable); log.debug("publishing Standings... " + minutesFromStart); publisherTarget.publish("/Standings", scoreDoc); } @Override public void execute(int minutesFromStart) { log.debug("preparing Standings... " + minutesFromStart); int submissionsAtTime = contest.getSubmissionsAtTime(minutesFromStart); ScoreDumper scoreDumper = new ScoreDumper(contest.getStandings(submissionsAtTime), minutesFromStart); log.debug("dumping Standings... " + minutesFromStart); String scoreTable = scoreDumper.execute(); StaticWebDocument scoreDoc = new StaticWebDocument("application/json", scoreTable); log.debug("publishing Standings... " + minutesFromStart); publisherTarget.publish("/Standings", scoreDoc); publisherTarget.publish(String.format("/Standings.%03d", minutesFromStart), scoreDoc); log.debug("done publishing Standings... " + minutesFromStart); } }
Fix code indentation.
katalyze/src/teamscore/ExtendedScoreDump.java
Fix code indentation.
Java
mit
8b4cc2ac20b9eb1af8ff93feb31877ff3fb6ad39
0
david540/cpp-2a-info,matthieu637/cpp-2a-info,david540/cpp-2a-info,matthieu637/cpp-2a-info,matthieu637/cpp-2a-info,david540/cpp-2a-info
package core; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang3.tuple.Pair; public class Marche { private boolean ouvert; private boolean fini; private long debut; private Map<Action, Set<Ordre>> liste_achats; private Map<Action, Set<Ordre>> liste_ventes; private List<Joueur> liste_joueurs; private Set<Integer> liste_id_ordres; private Map<Action, Set<Echange>> historiques,histoPartiel; private final Lock mutex = new ReentrantLock(); private Thread timer = null; public Marche() { ouvert = false; fini = false; liste_achats = new HashMap<>(); liste_ventes = new HashMap<>(); historiques = new HashMap<>(); for (Action a : Action.values()) { liste_achats.put(a, new TreeSet<Ordre>()); liste_ventes.put(a, new TreeSet<Ordre>()); historiques.put(a, new TreeSet<Echange>()); } liste_joueurs = new LinkedList<>(); liste_id_ordres = new TreeSet<>(); if(Config.getInstance().BANQUE){ Joueur banque = creer_joueur("banque"); banque.setSolde_euros(Integer.MAX_VALUE); int max_action_en_jeu = Config.getInstance().SOLDE_ACTIONS_INIT*Action.values().length*100;//100 joueurs for(Action a : Action.values()){ banque.getSolde_actions().put(a, Integer.MAX_VALUE); achat(banque, a, 0.25f, max_action_en_jeu); vend(banque, a, 25.0f, max_action_en_jeu); } } } public boolean est_ouvert() { return ouvert; } public void commence() { ouvert = true; debut = System.currentTimeMillis(); timer = new Thread() { @Override public void run() { try { Thread.sleep(1000 * 60 * Config.getInstance().TEMPS_PARTIE); } catch (InterruptedException e) { //might be interrupted if creator leaves //don't print error in this case } fini = true; try { Thread.sleep(1000 * 60); // sleep 1 s to be 
sure fini is taking into account } catch (InterruptedException e) { //might be interrupted if creator leaves //don't print error in this case } //fermer tous les ordres for(Joueur joueur : liste_joueurs){ List<Integer> ordre_supprimer = new LinkedList<>(); for(Pair<Integer, Ordre> t : joueur.getOperationsOuvertes()){ ordre_supprimer.add(t.getLeft()); } for(Integer i : ordre_supprimer){ annuler(joueur, i); } } } }; timer.start(); } public boolean est_fini() { return fini; } public synchronized Joueur creer_joueur(String nom) { Joueur j = new Joueur(nom); liste_joueurs.add(j); return j; } public synchronized boolean nom_possible(String nom) { for (Joueur j : liste_joueurs) if (j.getNom().equalsIgnoreCase(nom)) return false; return true; } public Set<Ordre> getListeAchats(Action a) { return liste_achats.get(a); } public Set<Ordre> getListeVentes(Action a) { return liste_ventes.get(a); } public Set<Echange> getHistoriqueEchanges(Action a,int n) { List<Echange> list = new ArrayList<>(); final Iterator<Echange> i = historiques.get(a).iterator(); for(int j=0;j<n;j++) i.next(); for (int j=n; j<historiques.get(a).size() && i.hasNext();j++) list.add(i.next()); //historiques.get(a) Set<Echange> retour= new LinkedHashSet<>(list); return retour; } public int achat(Joueur joueur_achat, Action a, float prix_achat, int volume_achat) { if (volume_achat <= 0.0) return -5; if (prix_achat <= 0.0) return -6; mutex.lock(); int argent_joueur = joueur_achat.getSolde_euros(); int argent_engage = (int) (volume_achat * prix_achat); if (argent_joueur < argent_engage) { mutex.unlock(); return -7; } Iterator<Ordre> it = liste_ventes.get(a).iterator(); while (it.hasNext() && volume_achat > 0) { Ordre vente = it.next(); Joueur joueur_vente = vente.getJoueur(); if (prix_achat >= vente.prix) { Echange e = new Echange(joueur_vente, joueur_achat, vente.prix, Math.min(vente.volume, volume_achat)); historiques.get(a).add(e); // l'achat est complete if (volume_achat <= vente.volume) { int volume_vendu = 
volume_achat; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (vente.prix * volume_vendu)); joueur_achat.setSolde_euros(joueur_achat.getSolde_euros() - (int) (vente.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); vente.setVolume(vente.getVolume() - volume_vendu); mutex.unlock(); return 0; } // volume_achat > vente.volume int volume_vendu = vente.volume; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (vente.prix * volume_vendu)); joueur_achat.setSolde_euros(joueur_achat.getSolde_euros() - (int) (vente.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); vente.setVolume(vente.getVolume() - volume_vendu); // remove Integer id_vente = vente.getId_ordre(); joueur_vente.retirerOperation(id_vente); it.remove(); volume_achat -= volume_vendu; } else break; } // provisionne le reste joueur_achat.setSolde_euros(joueur_achat.getSolde_euros() - (int) (prix_achat * volume_achat)); int id = creer_id_ordre(); Ordre achat = new Achat(id, a, prix_achat, volume_achat, joueur_achat); liste_achats.get(a).add(achat); joueur_achat.getOperationsOuvertes().add(Pair.of(id, achat)); mutex.unlock(); return id; } public Object vend(Joueur joueur_vente, Action a, float prix_vente, int volume_vente) { if (volume_vente <= 0.0) return -8; if (prix_vente <= 0.0) return -9; mutex.lock(); int volume_joueur = joueur_vente.getSolde_actions().get(a); if (volume_joueur < volume_vente) { mutex.unlock(); return -10; } joueur_vente.getSolde_actions().put(a, volume_joueur - volume_vente); Iterator<Ordre> it = liste_achats.get(a).iterator(); while (it.hasNext() && volume_vente > 0) { Achat achat = (Achat) it.next(); Joueur joueur_achat = achat.getJoueur(); if (prix_vente <= achat.prix) { Echange e = new Echange(joueur_vente, joueur_achat, achat.prix, Math.min(achat.volume, volume_vente)); historiques.get(a).add(e); // la vente est complete 
if (volume_vente <= achat.volume) { int volume_vendu = volume_vente; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (achat.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); // achat.setArgent_paye(achat.getArgent_paye() + (int) (achat.prix * volume_vendu)); achat.setVolume(achat.getVolume() - volume_vente); mutex.unlock(); return 0; } // volume_vente > vente.volume int volume_vendu = achat.volume; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (achat.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); // achat.setArgent_paye(achat.getArgent_paye() + (int) (achat.prix * volume_vendu)); achat.setVolume(achat.getVolume() - volume_vente); // remove Integer id_vente = achat.getId_ordre(); joueur_achat.retirerOperation(id_vente); it.remove(); volume_vente -= volume_vendu; } else break; } int id = creer_id_ordre(); Ordre vente = new Vente(id, a, prix_vente, volume_vente, joueur_vente); liste_ventes.get(a).add(vente); joueur_vente.getOperationsOuvertes().add(Pair.of(id, vente)); mutex.unlock(); return id; } public int suivre(Joueur joueur, Integer ordre) { mutex.lock(); Ordre o = joueur.contientOperation(ordre); if (o == null) { mutex.unlock(); return 0; } int vol = o.getVolume(); mutex.unlock(); return vol; } public int annuler(Joueur joueur, int ordre_id) { mutex.lock(); Ordre o = joueur.contientOperation(ordre_id); if (o == null) { mutex.unlock(); return -11; } if (o instanceof Achat) { liste_achats.remove(o); joueur.retirerOperation(ordre_id); int argent_recupere = (int) (o.prix * o.volume); if (argent_recupere > 0) joueur.setSolde_euros(joueur.getSolde_euros() + argent_recupere); mutex.unlock(); return argent_recupere; } else { liste_ventes.remove(o); joueur.retirerOperation(ordre_id); int nb_action = joueur.getSolde_actions().get(o.action); joueur.getSolde_actions().put(o.action, nb_action + 
o.volume); mutex.unlock(); return o.volume; } } private synchronized int creer_id_ordre() { int numero_partie = (int) (Math.random() * 100000000); while (liste_id_ordres.contains((Integer) numero_partie)) numero_partie = (int) (Math.random() * 100000000); liste_id_ordres.add(numero_partie); return numero_partie; } public synchronized void retirer_joueur(Joueur joueur) { List<Integer> ordre_supprimer = new LinkedList<>(); for(Pair<Integer, Ordre> t : joueur.getOperationsOuvertes()){ ordre_supprimer.add(t.getLeft()); } for(Integer i : ordre_supprimer){ annuler(joueur, i); } liste_joueurs.remove(joueur); } public String fin() { StringBuffer sb = new StringBuffer(liste_joueurs.size() * 100); sb.append("{'temps':"); long secondes = ((debut + Config.getInstance().TEMPS_PARTIE * 60 * 1000) - System.currentTimeMillis()) / 1000; if (secondes < 0) secondes = 0; sb.append(String.valueOf(secondes)); if (fini) { sb.append(",'classement':["); Collections.sort(liste_joueurs); for(Joueur j : liste_joueurs){ sb.append("'"); sb.append(j.getNom()); sb.append("',"); } sb.deleteCharAt(sb.length()-1); sb.append("]"); } sb.append("}"); return new String(sb); } @Override public String toString() { return "Marche [ouvert=" + ouvert + ", fini=" + fini + ", debut=" + debut + ", liste_achats=" + liste_achats + ", liste_ventes=" + liste_ventes + ", liste_joueurs=" + liste_joueurs + ", liste_id_ordres=" + liste_id_ordres + ", historiques=" + historiques + ", mutex=" + mutex + "]"; } public void destroy(){ if(timer != null){ timer.interrupt(); } } }
2017/SimBourse/src/core/Marche.java
package core; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang3.tuple.Pair; public class Marche { private boolean ouvert; private boolean fini; private long debut; private Map<Action, Set<Ordre>> liste_achats; private Map<Action, Set<Ordre>> liste_ventes; private List<Joueur> liste_joueurs; private Set<Integer> liste_id_ordres; private Map<Action, Set<Echange>> historiques; private final Lock mutex = new ReentrantLock(); private Thread timer = null; public Marche() { ouvert = false; fini = false; liste_achats = new HashMap<>(); liste_ventes = new HashMap<>(); historiques = new HashMap<>(); for (Action a : Action.values()) { liste_achats.put(a, new TreeSet<Ordre>()); liste_ventes.put(a, new TreeSet<Ordre>()); historiques.put(a, new TreeSet<Echange>()); } liste_joueurs = new LinkedList<>(); liste_id_ordres = new TreeSet<>(); if(Config.getInstance().BANQUE){ Joueur banque = creer_joueur("banque"); banque.setSolde_euros(Integer.MAX_VALUE); int max_action_en_jeu = Config.getInstance().SOLDE_ACTIONS_INIT*Action.values().length*100;//100 joueurs for(Action a : Action.values()){ banque.getSolde_actions().put(a, Integer.MAX_VALUE); achat(banque, a, 0.25f, max_action_en_jeu); vend(banque, a, 25.0f, max_action_en_jeu); } } } public boolean est_ouvert() { return ouvert; } public void commence() { ouvert = true; debut = System.currentTimeMillis(); timer = new Thread() { @Override public void run() { try { Thread.sleep(1000 * 60 * Config.getInstance().TEMPS_PARTIE); } catch (InterruptedException e) { //might be interrupted if creator leaves //don't print error in this case } fini = true; try { Thread.sleep(1000 * 60); // sleep 1 s to be sure fini is taking into account } catch (InterruptedException e) { 
//might be interrupted if creator leaves //don't print error in this case } //fermer tous les ordres for(Joueur joueur : liste_joueurs){ List<Integer> ordre_supprimer = new LinkedList<>(); for(Pair<Integer, Ordre> t : joueur.getOperationsOuvertes()){ ordre_supprimer.add(t.getLeft()); } for(Integer i : ordre_supprimer){ annuler(joueur, i); } } } }; timer.start(); } public boolean est_fini() { return fini; } public synchronized Joueur creer_joueur(String nom) { Joueur j = new Joueur(nom); liste_joueurs.add(j); return j; } public synchronized boolean nom_possible(String nom) { for (Joueur j : liste_joueurs) if (j.getNom().equalsIgnoreCase(nom)) return false; return true; } public Set<Ordre> getListeAchats(Action a) { return liste_achats.get(a); } public Set<Ordre> getListeVentes(Action a) { return liste_ventes.get(a); } public Set<Echange> getHistoriqueEchanges(Action a) { return historiques.get(a); } public int achat(Joueur joueur_achat, Action a, float prix_achat, int volume_achat) { if (volume_achat <= 0.0) return -5; if (prix_achat <= 0.0) return -6; mutex.lock(); int argent_joueur = joueur_achat.getSolde_euros(); int argent_engage = (int) (volume_achat * prix_achat); if (argent_joueur < argent_engage) { mutex.unlock(); return -7; } Iterator<Ordre> it = liste_ventes.get(a).iterator(); while (it.hasNext() && volume_achat > 0) { Ordre vente = it.next(); Joueur joueur_vente = vente.getJoueur(); if (prix_achat >= vente.prix) { Echange e = new Echange(joueur_vente, joueur_achat, vente.prix, Math.min(vente.volume, volume_achat)); historiques.get(a).add(e); // l'achat est complete if (volume_achat <= vente.volume) { int volume_vendu = volume_achat; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (vente.prix * volume_vendu)); joueur_achat.setSolde_euros(joueur_achat.getSolde_euros() - (int) (vente.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); vente.setVolume(vente.getVolume() - 
volume_vendu); mutex.unlock(); return 0; } // volume_achat > vente.volume int volume_vendu = vente.volume; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (vente.prix * volume_vendu)); joueur_achat.setSolde_euros(joueur_achat.getSolde_euros() - (int) (vente.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); vente.setVolume(vente.getVolume() - volume_vendu); // remove Integer id_vente = vente.getId_ordre(); joueur_vente.retirerOperation(id_vente); it.remove(); volume_achat -= volume_vendu; } else break; } // provisionne le reste joueur_achat.setSolde_euros(joueur_achat.getSolde_euros() - (int) (prix_achat * volume_achat)); int id = creer_id_ordre(); Ordre achat = new Achat(id, a, prix_achat, volume_achat, joueur_achat); liste_achats.get(a).add(achat); joueur_achat.getOperationsOuvertes().add(Pair.of(id, achat)); mutex.unlock(); return id; } public Object vend(Joueur joueur_vente, Action a, float prix_vente, int volume_vente) { if (volume_vente <= 0.0) return -8; if (prix_vente <= 0.0) return -9; mutex.lock(); int volume_joueur = joueur_vente.getSolde_actions().get(a); if (volume_joueur < volume_vente) { mutex.unlock(); return -10; } joueur_vente.getSolde_actions().put(a, volume_joueur - volume_vente); Iterator<Ordre> it = liste_achats.get(a).iterator(); while (it.hasNext() && volume_vente > 0) { Achat achat = (Achat) it.next(); Joueur joueur_achat = achat.getJoueur(); if (prix_vente <= achat.prix) { Echange e = new Echange(joueur_vente, joueur_achat, achat.prix, Math.min(achat.volume, volume_vente)); historiques.get(a).add(e); // la vente est complete if (volume_vente <= achat.volume) { int volume_vendu = volume_vente; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (achat.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); // achat.setArgent_paye(achat.getArgent_paye() + (int) (achat.prix * 
volume_vendu)); achat.setVolume(achat.getVolume() - volume_vente); mutex.unlock(); return 0; } // volume_vente > vente.volume int volume_vendu = achat.volume; joueur_vente.setSolde_euros(joueur_vente.getSolde_euros() + (int) (achat.prix * volume_vendu)); joueur_achat.getSolde_actions().put(a, joueur_achat.getSolde_actions().get(a) + volume_vendu); // achat.setArgent_paye(achat.getArgent_paye() + (int) (achat.prix * volume_vendu)); achat.setVolume(achat.getVolume() - volume_vente); // remove Integer id_vente = achat.getId_ordre(); joueur_achat.retirerOperation(id_vente); it.remove(); volume_vente -= volume_vendu; } else break; } int id = creer_id_ordre(); Ordre vente = new Vente(id, a, prix_vente, volume_vente, joueur_vente); liste_ventes.get(a).add(vente); joueur_vente.getOperationsOuvertes().add(Pair.of(id, vente)); mutex.unlock(); return id; } public int suivre(Joueur joueur, Integer ordre) { mutex.lock(); Ordre o = joueur.contientOperation(ordre); if (o == null) { mutex.unlock(); return 0; } int vol = o.getVolume(); mutex.unlock(); return vol; } public int annuler(Joueur joueur, int ordre_id) { mutex.lock(); Ordre o = joueur.contientOperation(ordre_id); if (o == null) { mutex.unlock(); return -11; } if (o instanceof Achat) { liste_achats.remove(o); joueur.retirerOperation(ordre_id); int argent_recupere = (int) (o.prix * o.volume); if (argent_recupere > 0) joueur.setSolde_euros(joueur.getSolde_euros() + argent_recupere); mutex.unlock(); return argent_recupere; } else { liste_ventes.remove(o); joueur.retirerOperation(ordre_id); int nb_action = joueur.getSolde_actions().get(o.action); joueur.getSolde_actions().put(o.action, nb_action + o.volume); mutex.unlock(); return o.volume; } } private synchronized int creer_id_ordre() { int numero_partie = (int) (Math.random() * 100000000); while (liste_id_ordres.contains((Integer) numero_partie)) numero_partie = (int) (Math.random() * 100000000); liste_id_ordres.add(numero_partie); return numero_partie; } public synchronized 
void retirer_joueur(Joueur joueur) { List<Integer> ordre_supprimer = new LinkedList<>(); for(Pair<Integer, Ordre> t : joueur.getOperationsOuvertes()){ ordre_supprimer.add(t.getLeft()); } for(Integer i : ordre_supprimer){ annuler(joueur, i); } liste_joueurs.remove(joueur); } public String fin() { StringBuffer sb = new StringBuffer(liste_joueurs.size() * 100); sb.append("{'temps':"); long secondes = ((debut + Config.getInstance().TEMPS_PARTIE * 60 * 1000) - System.currentTimeMillis()) / 1000; if (secondes < 0) secondes = 0; sb.append(String.valueOf(secondes)); if (fini) { sb.append(",'classement':["); Collections.sort(liste_joueurs); for(Joueur j : liste_joueurs){ sb.append("'"); sb.append(j.getNom()); sb.append("',"); } sb.deleteCharAt(sb.length()-1); sb.append("]"); } sb.append("}"); return new String(sb); } @Override public String toString() { return "Marche [ouvert=" + ouvert + ", fini=" + fini + ", debut=" + debut + ", liste_achats=" + liste_achats + ", liste_ventes=" + liste_ventes + ", liste_joueurs=" + liste_joueurs + ", liste_id_ordres=" + liste_id_ordres + ", historiques=" + historiques + ", mutex=" + mutex + "]"; } public void destroy(){ if(timer != null){ timer.interrupt(); } } }
Update Marche.java
2017/SimBourse/src/core/Marche.java
Update Marche.java
Java
mit
9e1f7dcbeef6dfd52f4a1d354deea99123efa3d0
0
Monaden/automatabuilder
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package automatabuilder; import interfaces.IState; import interfaces.ITransition; import java.util.Vector; import org.junit.*; import static org.junit.Assert.*; /** * * @author Adam */ public class StateTest { private Vector<ITransition> transitions; private IState q0; private IState q1; private Symbol a; @Before public void setUp() throws Exception { transitions = new Vector<>(); } private void buildStateWithOneTransitions(){ a = new Symbol("a"); q0 = new State(transitions, false, "q0"); q1 = new State(new Vector(), true, "q1"); Transition t1 = new Transition(q1, a); transitions.add(t1); } @Test public void testGetName() { State state = new State(transitions, true, "q0"); assertEquals("q0", state.getName()); } @Test public void testIsFinal() { State state = new State(transitions, true, "q0"); assertEquals(true, state.isFinal()); } @Test public void testTransition() { buildStateWithOneTransitions(); IState result = q0.transition(a); assertEquals(q1, result); } @Test public void testToString() { buildStateWithOneTransitions(); assertEquals("q0:(a->q1)", q0.toString()); assertEquals("*q1:()",q1.toString()); } }
src/test/java/automatabuilder/StateTest.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package automatabuilder; import interfaces.IState; import interfaces.ITransition; import java.util.Vector; import org.junit.*; import static org.junit.Assert.*; /** * * @author Adam */ public class StateTest { private Vector<ITransition> transitions; private IState q0; private IState q1; private Symbol a; @Before public void setUp() throws Exception { transitions = new Vector<>(); } private void buildStateWithOneTransitions(){ a = new Symbol("a"); q0 = new State(transitions, false, "q0"); q1 = new State(new Vector(), true, "q1"); Transition t1 = new Transition(q1, a); transitions.add(t1); } @Test public void testGetName() { State state = new State(transitions, true, "q0"); String expResult = "q0"; assertEquals(expResult, state.getName()); } @Test public void testIsFinal() { State state = new State(transitions, true, "q0"); assertEquals(true, state.isFinal()); } @Test public void testTransition() { buildStateWithOneTransitions(); IState result = q0.transition(a); assertEquals(q1, result); } @Test public void testToString() { buildStateWithOneTransitions(); assertEquals("q0:(a->q1)", q0.toString()); assertEquals("*q1:()",q1.toString()); } }
removed one duplicate line
src/test/java/automatabuilder/StateTest.java
removed one duplicate line
Java
mit
e57f97992cfd9fa92f632a7b8dd005e541b05c35
0
nemerosa/ontrack,nemerosa/ontrack,flesire/ontrack,nemerosa/ontrack,nemerosa/ontrack,flesire/ontrack,flesire/ontrack,flesire/ontrack,nemerosa/ontrack,flesire/ontrack
package net.nemerosa.ontrack.boot.ui; import net.nemerosa.ontrack.extension.api.ExtensionManager; import net.nemerosa.ontrack.extension.api.UserMenuExtension; import net.nemerosa.ontrack.model.Ack; import net.nemerosa.ontrack.model.form.Form; import net.nemerosa.ontrack.model.form.Password; import net.nemerosa.ontrack.model.form.YesNo; import net.nemerosa.ontrack.model.labels.LabelManagement; import net.nemerosa.ontrack.model.security.*; import net.nemerosa.ontrack.model.support.Action; import net.nemerosa.ontrack.model.support.PasswordChange; import net.nemerosa.ontrack.ui.controller.AbstractResourceController; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.security.access.AccessDeniedException; import org.springframework.web.bind.annotation.*; import javax.validation.Valid; import java.util.Collection; import static org.springframework.web.servlet.mvc.method.annotation.MvcUriComponentsBuilder.on; @RestController @RequestMapping("/user") public class UserController extends AbstractResourceController { private final SecurityService securityService; private final UserService userService; private final ExtensionManager extensionManager; @Autowired public UserController(SecurityService securityService, UserService userService, ExtensionManager extensionManager) { this.securityService = securityService; this.userService = userService; this.extensionManager = extensionManager; } @RequestMapping(value = "", method = RequestMethod.GET) public ConnectedAccount getCurrentUser() { // Gets the current account Account account = securityService.getCurrentAccount(); // Account present if (account != null) { return toLoggedAccount(account); } // Not logged else { return toAnonymousAccount(); } } @RequestMapping(value = "login", method = RequestMethod.GET) public Form loginForm() { return Form.create() .name() .password() .with(YesNo.of("rememberMe").label("Remember me").value(false)) ; } 
@RequestMapping(value = "login", method = RequestMethod.POST) public ConnectedAccount login() { // Gets the current account Account account = securityService.getCurrentAccount(); // If not logged, rejects if (account == null) { throw new AccessDeniedException("Login required."); } // Already logged else { return toLoggedAccount(account); } } @RequestMapping(value = "logged-out", method = RequestMethod.GET) @ResponseStatus(HttpStatus.NO_CONTENT) public void loggedOut() { } @RequestMapping(value = "password", method = RequestMethod.GET) public Form getChangePasswordForm() { return Form.create() .with( Password.of("oldPassword") .label("Old password") .help("You need your old password in order to change it. If you do not remember it, " + "you'll have to contact an administrator who can change it for you.") ) .with( Password.of("newPassword") .label("New password") .withConfirmation() ) ; } @RequestMapping(value = "password", method = RequestMethod.POST) public Ack changePassword(@RequestBody @Valid PasswordChange input) { return userService.changePassword(input); } // Resource assemblers private ConnectedAccount toAnonymousAccount() { return ConnectedAccount.none(isAuthenticationRequired()); } private boolean isAuthenticationRequired() { return !securityService.getSecuritySettings().isGrantProjectViewToAll(); } private ConnectedAccount toLoggedAccount(Account account) { return userMenu(ConnectedAccount.of(isAuthenticationRequired(), account)); } private ConnectedAccount userMenu(ConnectedAccount user) { // Settings if (securityService.isGlobalFunctionGranted(GlobalSettings.class)) { user.add(Action.of("settings", "Settings", "settings")); } // Changing his password if (user.getAccount().getAuthenticationSource().isAllowingPasswordChange()) { user.add( Action.form( "user-password", "Change password", uri(on(getClass()).getChangePasswordForm()) ) ); } // Account management if (securityService.isGlobalFunctionGranted(AccountManagement.class) || 
securityService.isGlobalFunctionGranted(AccountGroupManagement.class)) { user.add(Action.of("admin-accounts", "Account management", "admin-accounts")); } // Management of predefined validation stamps and promotion levels if (securityService.isGlobalFunctionGranted(GlobalSettings.class)) { user.add(Action.of("admin-predefined-validation-stamps", "Predefined validation stamps", "admin-predefined-validation-stamps")); user.add(Action.of("admin-predefined-promotion-levels", "Predefined promotion levels", "admin-predefined-promotion-levels")); } // Management of labels if (securityService.isGlobalFunctionGranted(LabelManagement.class)) { user.add(Action.of("admin-labels", "Labels", "admin-labels")); } // Contributions from extensions ConnectedAccount contributed = userMenuExtensions(user); // Admin tools if (securityService.isGlobalFunctionGranted(ApplicationManagement.class)) { contributed.add(Action.of("admin-health", "System health", "admin-health")); contributed.add(Action.of("admin-extensions", "System extensions", "admin-extensions")); contributed.add(Action.of("admin-jobs", "System jobs", "admin-jobs")); contributed.add(Action.of("admin-log-entries", "Log entries", "admin-log-entries")); } // OK return contributed; } private ConnectedAccount userMenuExtensions(ConnectedAccount user) { // Gets the list of user menu extensions Collection<UserMenuExtension> extensions = extensionManager.getExtensions(UserMenuExtension.class); // For each extension for (UserMenuExtension extension : extensions) { // Granted? Class<? extends GlobalFunction> fn = extension.getGlobalFunction(); if (fn == null || securityService.isGlobalFunctionGranted(fn)) { // Adds the menu entry // Prepends the extension ID user.add(resolveExtensionAction(extension)); } } // OK return user; } }
ontrack-ui/src/main/java/net/nemerosa/ontrack/boot/ui/UserController.java
package net.nemerosa.ontrack.boot.ui; import net.nemerosa.ontrack.extension.api.ExtensionManager; import net.nemerosa.ontrack.extension.api.UserMenuExtension; import net.nemerosa.ontrack.model.Ack; import net.nemerosa.ontrack.model.form.Form; import net.nemerosa.ontrack.model.form.Password; import net.nemerosa.ontrack.model.form.YesNo; import net.nemerosa.ontrack.model.security.*; import net.nemerosa.ontrack.model.support.Action; import net.nemerosa.ontrack.model.support.PasswordChange; import net.nemerosa.ontrack.ui.controller.AbstractResourceController; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.security.access.AccessDeniedException; import org.springframework.web.bind.annotation.*; import javax.validation.Valid; import java.util.Collection; import static org.springframework.web.servlet.mvc.method.annotation.MvcUriComponentsBuilder.on; @RestController @RequestMapping("/user") public class UserController extends AbstractResourceController { private final SecurityService securityService; private final UserService userService; private final ExtensionManager extensionManager; @Autowired public UserController(SecurityService securityService, UserService userService, ExtensionManager extensionManager) { this.securityService = securityService; this.userService = userService; this.extensionManager = extensionManager; } @RequestMapping(value = "", method = RequestMethod.GET) public ConnectedAccount getCurrentUser() { // Gets the current account Account account = securityService.getCurrentAccount(); // Account present if (account != null) { return toLoggedAccount(account); } // Not logged else { return toAnonymousAccount(); } } @RequestMapping(value = "login", method = RequestMethod.GET) public Form loginForm() { return Form.create() .name() .password() .with(YesNo.of("rememberMe").label("Remember me").value(false)) ; } @RequestMapping(value = "login", method = RequestMethod.POST) 
public ConnectedAccount login() { // Gets the current account Account account = securityService.getCurrentAccount(); // If not logged, rejects if (account == null) { throw new AccessDeniedException("Login required."); } // Already logged else { return toLoggedAccount(account); } } @RequestMapping(value = "logged-out", method = RequestMethod.GET) @ResponseStatus(HttpStatus.NO_CONTENT) public void loggedOut() { } @RequestMapping(value = "password", method = RequestMethod.GET) public Form getChangePasswordForm() { return Form.create() .with( Password.of("oldPassword") .label("Old password") .help("You need your old password in order to change it. If you do not remember it, " + "you'll have to contact an administrator who can change it for you.") ) .with( Password.of("newPassword") .label("New password") .withConfirmation() ) ; } @RequestMapping(value = "password", method = RequestMethod.POST) public Ack changePassword(@RequestBody @Valid PasswordChange input) { return userService.changePassword(input); } // Resource assemblers private ConnectedAccount toAnonymousAccount() { return ConnectedAccount.none(isAuthenticationRequired()); } private boolean isAuthenticationRequired() { return !securityService.getSecuritySettings().isGrantProjectViewToAll(); } private ConnectedAccount toLoggedAccount(Account account) { return userMenu(ConnectedAccount.of(isAuthenticationRequired(), account)); } private ConnectedAccount userMenu(ConnectedAccount user) { // Settings if (securityService.isGlobalFunctionGranted(GlobalSettings.class)) { user.add(Action.of("settings", "Settings", "settings")); } // Changing his password if (user.getAccount().getAuthenticationSource().isAllowingPasswordChange()) { user.add( Action.form( "user-password", "Change password", uri(on(getClass()).getChangePasswordForm()) ) ); } // Account management if (securityService.isGlobalFunctionGranted(AccountManagement.class) || securityService.isGlobalFunctionGranted(AccountGroupManagement.class)) { 
user.add(Action.of("admin-accounts", "Account management", "admin-accounts")); } // Management of predefined validation stamps and promotion levels if (securityService.isGlobalFunctionGranted(GlobalSettings.class)) { user.add(Action.of("admin-predefined-validation-stamps", "Predefined validation stamps", "admin-predefined-validation-stamps")); user.add(Action.of("admin-predefined-promotion-levels", "Predefined promotion levels", "admin-predefined-promotion-levels")); } // Contributions from extensions user = userMenuExtensions(user); // Admin tools if (securityService.isGlobalFunctionGranted(ApplicationManagement.class)) { user.add(Action.of("admin-health", "System health", "admin-health")); user.add(Action.of("admin-extensions", "System extensions", "admin-extensions")); user.add(Action.of("admin-jobs", "System jobs", "admin-jobs")); user.add(Action.of("admin-log-entries", "Log entries", "admin-log-entries")); } // OK return user; } private ConnectedAccount userMenuExtensions(ConnectedAccount user) { // Gets the list of user menu extensions Collection<UserMenuExtension> extensions = extensionManager.getExtensions(UserMenuExtension.class); // For each extension for (UserMenuExtension extension : extensions) { // Granted? Class<? extends GlobalFunction> fn = extension.getGlobalFunction(); if (fn == null || securityService.isGlobalFunctionGranted(fn)) { // Adds the menu entry // Prepends the extension ID user.add(resolveExtensionAction(extension)); } } // OK return user; } }
#615 Management - user menu entry
ontrack-ui/src/main/java/net/nemerosa/ontrack/boot/ui/UserController.java
#615 Management - user menu entry
Java
mit
ad6ced43962b0c1d19e9a07d9c308946f6a593db
0
Col-E/Recaf,Col-E/Recaf
package me.coley.recaf.debug; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; import com.sun.jdi.*; import com.sun.jdi.connect.*; import com.sun.jdi.event.*; import com.sun.jdi.request.*; import com.sun.tools.jdi.*; import me.coley.recaf.workspace.*; import java.io.*; import java.lang.management.ManagementFactory; import java.util.*; import java.util.function.Consumer; import static me.coley.recaf.util.Log.*; /** * JDI {@link VirtualMachine} wrapper. * * @author Matt */ public class VMWrap { private static final long PRINT_THREAD_DELAY = 30L; private static final int CONNECTOR_TIMEOUT = 5000; // private final Map<Class<? extends Event>, Consumer<Event>> eventConsumers = new HashMap<>(); private final Multimap<String, Consumer<ClassPrepareEvent>> prepares = newMultiMap(); private final Multimap<String, Consumer<ClassUnloadEvent>> unloads = newMultiMap(); private final Multimap<Location, Consumer<BreakpointEvent>> breakpoints = newMultiMap(); private final Multimap<ThreadReference, Consumer<StepEvent>> steps = newMultiMap(); private final Set<Consumer<ExceptionEvent>> exceptions = new HashSet<>(); private final Set<Consumer<MethodEntryEvent>> methodEntries = new HashSet<>(); private final Set<Consumer<MethodExitEvent>> methodExits = new HashSet<>(); private final Set<Consumer<MonitorWaitEvent>> monitorWaits = new HashSet<>(); private final Set<Consumer<MonitorWaitedEvent>> monitorWaiteds = new HashSet<>(); private final Set<Consumer<MonitorContendedEnterEvent>> monitorContendEnters = new HashSet<>(); private final Set<Consumer<MonitorContendedEnteredEvent>> monitorContendEntereds = new HashSet<>(); private final Set<Consumer<AccessWatchpointEvent>> watchpointAccesses = new HashSet<>(); private final Set<Consumer<ModificationWatchpointEvent>> watchpointModifies = new HashSet<>(); private final Set<Consumer<ThreadStartEvent>> threadStarts = new HashSet<>(); private final Set<Consumer<ThreadDeathEvent>> threadDeaths = new 
HashSet<>(); private final Set<Consumer<VMStartEvent>> vmStarts = new HashSet<>(); private final Set<Consumer<VMDeathEvent>> vmDeaths = new HashSet<>(); private final Set<Consumer<VMDisconnectEvent>> vmDisconnects = new HashSet<>(); private final VirtualMachine vm; private PrintStream out; /** * @param vm * Virtual machine to wrap. */ private VMWrap(VirtualMachine vm) { this.vm = vm; setupEventConsumers(); } /** * Assuming the current process is running as a debugee, connects to the current process. * * @return Wrapper for the current running context. * * @throws IOException * Thrown if connecting to the current process failed. */ public static VMWrap current() throws IOException { // Get pid of self process String name = ManagementFactory.getRuntimeMXBean().getName(); String pid = name.substring(0, name.indexOf('@')); return process(pid); } /** * Assuming the given process is running as a debugee, connects to the current process. * * @param pid Process id. * * @return Wrapper for the given running context. * * @throws IOException * Thrown if connecting to the given process failed. */ public static VMWrap process(String pid) throws IOException { AttachingConnector connector = Bootstrap.virtualMachineManager().attachingConnectors() .stream() .filter(c -> c.name().equals("com.sun.jdi.ProcessAttach")) .findFirst() .orElseThrow(() -> new RuntimeException("Unable to locate ProcessAttachingConnector")); Map<String, ? extends Connector.Argument> args = connector.defaultArguments(); args.get("pid").setValue(pid); args.get("timeout").setValue(String.valueOf(CONNECTOR_TIMEOUT)); // Try to connect try { return new VMWrap(connector.attach(args)); } catch(IllegalConnectorArgumentsException ex) { throw new IOException(ex); } } /** * Connect to an already created debugee listening on the given port. * * @param port * Port to connect on. * @param address * Address to connect to. Use {@code null} for localhost. * * @return Wrapper for the given running context. 
* * @throws IOException * Thrown if connecting to the given process failed. */ public static VMWrap connect(String port, String address) throws IOException { // com.sun.jdi.SocketAttach AttachingConnector connector = Bootstrap.virtualMachineManager().attachingConnectors() .stream() .filter(c -> c.name().equals("com.sun.jdi.SocketAttach")) .findFirst() .orElseThrow(() -> new RuntimeException("Unable to locate SocketAttachingConnector")); Map<String, ? extends Connector.Argument> args = connector.defaultArguments(); args.get("timeout").setValue(String.valueOf(CONNECTOR_TIMEOUT)); args.get("port").setValue(port); if (address != null) args.get("localAddress").setValue(address); try { return new VMWrap(connector.attach(args)); } catch(IllegalConnectorArgumentsException ex) { throw new IOException(ex); } } /** * Start a process in debug mode and connect to it. * * @param main * Name of class containing the main method. * @param options * Launch arguments such as the classpath. * @param suspend * {@code true} to start the VM in a paused state. * * @return Wrapper for the newly created running context. * * @throws IOException * Thrown if connecting to the given process failed. */ public static VMWrap launching(String main, String options, boolean suspend) throws IOException { VirtualMachineManager vmm = Bootstrap.virtualMachineManager(); LaunchingConnector connector = vmm.defaultConnector(); Map<String, ? extends Connector.Argument> args = connector.defaultArguments(); args.get("options").setValue(options); args.get("suspend").setValue(String.valueOf(suspend)); args.get("main").setValue(main); try { return new VMWrap(connector.launch(args)); } catch(VMStartException | IllegalConnectorArgumentsException ex) { throw new IOException(ex); } } /** * @param backing * Resource to defer to. * * @return Workspace resource with the current vm link. 
*/ public DebuggerResource toResource(JavaResource backing) { return new DebuggerResource(this, backing); } /** * @return The targeted VM. */ public VirtualMachine getTargetVM() { return vm; } /** * @param name * Quantified class name. * @param code * New bytecode for the class. * * @return {@code true} if redefinition succeeded. {@code false} if redefinition is not * supported. * * @throws JdiRedefineException * When redefinition failed for any of the following reasons:<ul> * <li>The given name has not been loaded by the target VM</li> * <li>A subfeature of redefinition was not supported <i>(Changing class schema for * example)</i></li> * <li>Bytecode does not pass the verifier</li> * <li>Bytecode uses an unsupported class file version</li> * <li>Bytecode is not a valid class</li> * <li>Bytecode does not represent the class given by the quantified name</li> * <li>Bytecode creates a circular inheritance hierarchy</li> * </ul> */ public boolean redefine(String name, byte[] code) throws JdiRedefineException { if(!vm.canRedefineClasses() || !vm.canBeModified()) return false; ClassType type = getType(name); if (type == null) throw new JdiRedefineException("Given class name has not been loaded by the target VM"); Map<ReferenceType, byte[]> map = Collections.singletonMap(type, code); try { vm.redefineClasses(map); } catch(UnsupportedOperationException ex) { throw new JdiRedefineException(ex, "Redefinition unsupported, [AddMethods:" + vm.canAddMethod() + ", unrestricted:" + vm.canUnrestrictedlyRedefineClasses() + "]"); } catch(NoClassDefFoundError ex) { throw new JdiRedefineException(ex, "Given bytecode does not match class being redefined"); } catch(UnsupportedClassVersionError ex) { throw new JdiRedefineException(ex, "Given bytecode has uses unsupported class file version"); } catch(VerifyError ex) { throw new JdiRedefineException(ex, "Given bytecode does not pass verification"); } catch(ClassFormatError ex) { throw new JdiRedefineException(ex, "Given bytecode is not a 
valid class"); } catch(ClassCircularityError ex) { throw new JdiRedefineException(ex, "Given bytecode has a circular hierarchy"); } return true; } /** * @return Optional of the main thread. */ public Optional<ThreadReference> getMainThread() { return vm.allThreads().stream().filter(t -> t.name().equals("main")).findFirst(); } /** * Invoke a static method on the remote VM. * * @param owner * Quantified class name. * @param name * Method name. * @param desc * Method descriptor. * @param args * Arguments to pass. Supported types are: <ul> * <li>Primitives <i>(int, float, etc.)</i></li> * <li>String</li> * <li>{@link Value}</li> * </ul> * * @return JDI mirrored value. * * @throws JdiInvokeException * When invoke failed for any of the following reasons:<ul> * <li>Invalid owner type <i>(Including if owner class is not loaded)</i></li> * <li>Invalid method type</li> * <li>Method not found for owner type</li> * <li>Method cannot be invoked on the main thread</li> * </ul> */ public Value invokeStatic(String owner, String name, String desc, Object... 
args) throws JdiInvokeException { // Get references needed for the invoke ClassType c = getType(owner); if (c == null) throw new JdiInvokeException("Given class name has not been loaded by the target VM"); Optional<ThreadReference> thread = getMainThread(); if (!thread.isPresent()) throw new JdiInvokeException("No main thread found"); Method method = c.concreteMethodByName(name, desc); // Create mirror values of args List<Value> argMirros = new ArrayList<>(); for (Object arg : args) { if (arg instanceof String) argMirros.add(vm.mirrorOf((String) arg)); else if (arg instanceof Integer) argMirros.add(vm.mirrorOf((int) arg)); else if (arg instanceof Boolean) argMirros.add(vm.mirrorOf((boolean) arg)); else if (arg instanceof Long) argMirros.add(vm.mirrorOf((long) arg)); else if (arg instanceof Float) argMirros.add(vm.mirrorOf((float) arg)); else if (arg instanceof Double) argMirros.add(vm.mirrorOf((double) arg)); else if (arg instanceof Byte) argMirros.add(vm.mirrorOf((byte) arg)); else if (arg instanceof Character) argMirros.add(vm.mirrorOf((char) arg)); else if (arg instanceof Short) argMirros.add(vm.mirrorOf((short) arg)); else if (arg instanceof Value) argMirros.add((Value) arg); else throw new JdiInvokeException("Invalid type given in args: " + arg.getClass().getName()); } // Attempt to invoke try { return c.invokeMethod(thread.get(), method, argMirros, 0); } catch(InvalidTypeException ex) { throw new JdiInvokeException(ex, "Given type was invalid"); } catch(ClassNotLoadedException ex) { throw new JdiInvokeException(ex, "Given owner was not loaded"); } catch(IncompatibleThreadStateException ex) { throw new JdiInvokeException(ex, "Cannot invoke method on main thread"); } catch(InvocationException ex) { throw new JdiInvokeException(ex, "Generic invoke error"); } } /** * @param name * Quantified class name. * * @return Reference type for the class. {@code null} if the class is not loaded in the target * vm. 
*/ public ClassType getType(String name) { List<ReferenceType> matches = vm.classesByName(name); if (matches.isEmpty()) return null; return (ClassType) matches.get(0); } /** * Register an action for when classes are prepared. * * @param name * Quantified name of the class. {@code null} to accept any class. * @param action * Action to run. * * @return The request. */ public ClassPrepareRequest prepare(String name, Consumer<ClassPrepareEvent> action) { ClassPrepareRequest request = vm.eventRequestManager().createClassPrepareRequest(); if (name != null) request.addClassFilter(name); prepares.put(name, action); return request; } /** * Register an action &amp; breakpoint for when a location is hit. * * @param location * Location to add breakpoint to. * @param action * Action to run. * * @return The request. */ public BreakpointRequest breakpoint(Location location, Consumer<BreakpointEvent> action) { BreakpointRequest request = vm.eventRequestManager().createBreakpointRequest(location); breakpoints.put(location, action); return request; } /** * Register a step on the given thread. * * @param thread * The thread to intercept steps in. * @param size * Step size. <ul> * <li>{@link StepRequest#STEP_LINE} for stepping to the next line. * If no debug info is givem, this defaults to {@link StepRequest#STEP_MIN}</li> * <li>{@link StepRequest#STEP_MIN} for <i>any</i> code index change.</li> * </ul> * @param depth * Step depth. * <ul> * <li>{@link StepRequest#STEP_INTO} for stepping into new frames.</li> * <li>{@link StepRequest#STEP_OVER} for stepping over new frames.</li> * <li>{@link StepRequest#STEP_OUT} for stepping out of the current frame.</li> * </ul> * @param action * Action to run. * * @return The request. 
*/ public StepRequest step(ThreadReference thread, int size, int depth, Consumer<StepEvent> action) { StepRequest request = vm.eventRequestManager().createStepRequest(thread, size, depth); steps.put(thread, action); return request; } /** * Register an action for entering methods. * * @param action * Action to run. * * @return The request. */ public MethodEntryRequest methodEntry(Consumer<MethodEntryEvent> action) { MethodEntryRequest request = vm.eventRequestManager().createMethodEntryRequest(); methodEntries.add(action); return request; } /** * Register an action for exiting methods. * * @param action * Action to run. * * @return The request. */ public MethodExitRequest methodExit(Consumer<MethodExitEvent> action) { MethodExitRequest request = vm.eventRequestManager().createMethodExitRequest(); methodExits.add(action); return request; } /** * Register an action for thrown exceptions. * * @param type * The type of exception to catch. {@code null} for any time. * @param caught * Call actions on caught exceptions. * @param uncaught * Call actions on uncaught exceptions. * @param action * Action to run. * * @return The request. */ public ExceptionRequest exception(ReferenceType type, boolean caught, boolean uncaught, Consumer<ExceptionEvent> action) { ExceptionRequest request = vm.eventRequestManager() .createExceptionRequest(type, caught, uncaught); exceptions.add(action); return request; } // TODO: Registering methods for the events that don't have 'em /** * Set IO handlers. * * @param out * Stream to send VM's output to. May be {@code null}. */ public void setup(PrintStream out) { this.out = out; } /** * @return Debugged process. */ public Process getProcess() { return vm.process(); } /** * Begin handling vm events. */ public void start() { // Used to keep track of if we're still attached to the VM. 
boolean[] running = {true}; try { // Start redirecting process output new Thread(() -> { try { InputStream pOutput = vm.process().getInputStream(); InputStream pErr = vm.process().getErrorStream(); byte[] buffer = new byte[4096]; while(running[0] || vm.process().isAlive()) { // Handle receiving output if(out != null) { int size = pOutput.available(); if(size > 0) { int n = pOutput.read(buffer, 0, Math.min(size, buffer.length)); out.println(new String(buffer, 0, n)); } size = pErr.available(); if(size > 0) { int n = pErr.read(buffer, 0, Math.min(size, buffer.length)); out.println(new String(buffer, 0, n)); } } Thread.sleep(PRINT_THREAD_DELAY); } } catch(InterruptedException | IOException ex) { error(ex, "Exception occurred while processing VM IPC"); } }).start(); // Handle vm events eventLoop(); } catch(VMDisconnectedException ex) { // Expected // - Stop print redirect thread running[0] = false; } catch(InterruptedException ex) { error(ex, "Failed processing VM event queue"); } } private void eventLoop() throws VMDisconnectedException, InterruptedException { vm.resume(); EventSet eventSet = null; while((eventSet = vm.eventQueue().remove()) != null) { for(Event event : eventSet) { // Key is the first interface parent because they all are interface impls. Class<?> key = event.getClass(); if (key.getInterfaces().length > 0) key = key.getInterfaces()[0]; // Run consumers if any found. 
Consumer<Event> consumer = eventConsumers.get(key); if(consumer != null) consumer.accept(event); // continue VM operations vm.resume(); } } } private void setupEventConsumers() { eventConsumers.put(ClassPrepareEvent.class, (event) -> { ClassPrepareEvent prepare = (ClassPrepareEvent) event; String key = prepare.referenceType().name(); prepares.get(key).forEach(consumer -> consumer.accept(prepare)); }); eventConsumers.put(ClassUnloadEvent.class, (event) -> { ClassUnloadEvent unload = (ClassUnloadEvent) event; String key = unload.className(); unloads.get(key).forEach(consumer -> consumer.accept(unload)); }); eventConsumers.put(BreakpointEvent.class, (event) -> { BreakpointEvent breakpoint = (BreakpointEvent) event; Location key = breakpoint.location(); breakpoints.get(key).forEach(consumer -> consumer.accept(breakpoint)); }); eventConsumers.put(StepEvent.class, (event) -> { StepEvent step = (StepEvent) event; ThreadReference key = step.thread(); steps.get(key).forEach(consumer -> consumer.accept(step)); }); eventConsumers.put(MethodEntryEvent.class, (event) -> { MethodEntryEvent entry = (MethodEntryEvent) event; methodEntries.forEach(consumer -> consumer.accept(entry)); }); eventConsumers.put(MethodExitEvent.class, (event) -> { MethodExitEvent exit = (MethodExitEvent) event; methodExits.forEach(consumer -> consumer.accept(exit)); }); eventConsumers.put(MonitorWaitEvent.class, (event) -> { MonitorWaitEvent wait = (MonitorWaitEvent) event; monitorWaits.forEach(consumer -> consumer.accept(wait)); }); eventConsumers.put(MonitorWaitedEvent.class, (event) -> { MonitorWaitedEvent wait = (MonitorWaitedEvent) event; monitorWaiteds.forEach(consumer -> consumer.accept(wait)); }); eventConsumers.put(MonitorContendedEnterEvent.class, (event) -> { MonitorContendedEnterEvent enter = (MonitorContendedEnterEvent) event; monitorContendEnters.forEach(consumer -> consumer.accept(enter)); }); eventConsumers.put(MonitorContendedEnteredEvent.class, (event) -> { MonitorContendedEnteredEvent 
entered = (MonitorContendedEnteredEvent) event; monitorContendEntereds.forEach(consumer -> consumer.accept(entered)); }); eventConsumers.put(AccessWatchpointEvent.class, (event) -> { AccessWatchpointEvent acc = (AccessWatchpointEvent) event; watchpointAccesses.forEach(consumer -> consumer.accept(acc)); }); eventConsumers.put(ModificationWatchpointEvent.class, (event) -> { ModificationWatchpointEvent modify = (ModificationWatchpointEvent) event; watchpointModifies.forEach(consumer -> consumer.accept(modify)); }); eventConsumers.put(ExceptionEvent.class, (event) -> { ExceptionEvent exc = (ExceptionEvent) event; exceptions.forEach(consumer -> consumer.accept(exc)); }); eventConsumers.put(ThreadStartEvent.class, (event) -> { ThreadStartEvent start = (ThreadStartEvent) event; threadStarts.forEach(consumer -> consumer.accept(start)); }); eventConsumers.put(ThreadDeathEvent.class, (event) -> { ThreadDeathEvent death = (ThreadDeathEvent) event; threadDeaths.forEach(consumer -> consumer.accept(death)); }); eventConsumers.put(VMStartEvent.class, (event) -> { VMStartEvent start = (VMStartEvent) event; vmStarts.forEach(consumer -> consumer.accept(start)); }); eventConsumers.put(VMDisconnectEvent.class, (event) -> { VMDisconnectEvent disconnect = (VMDisconnectEvent) event; vmDisconnects.forEach(consumer -> consumer.accept(disconnect)); }); eventConsumers.put(VMDeathEvent.class, (event) -> { VMDeathEvent death = (VMDeathEvent) event; vmDeaths.forEach(consumer -> consumer.accept(death)); }); } private static <K, V> Multimap<K, V> newMultiMap() { return MultimapBuilder.hashKeys().arrayListValues().build(); } }
src/main/java/me/coley/recaf/debug/VMWrap.java
package me.coley.recaf.debug; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; import com.sun.jdi.*; import com.sun.jdi.connect.*; import com.sun.jdi.event.*; import com.sun.jdi.request.*; import com.sun.tools.jdi.*; import me.coley.recaf.workspace.*; import java.io.*; import java.lang.management.ManagementFactory; import java.util.*; import java.util.function.Consumer; import static me.coley.recaf.util.Log.*; /** * JDI {@link VirtualMachine} wrapper. * * @author Matt */ public class VMWrap { private static final long PRINT_THREAD_DELAY = 30L; private static final int CONNECTOR_TIMEOUT = 5000; // private final Map<Class<? extends Event>, Consumer<Event>> eventConsumers = new HashMap<>(); private final Multimap<String, Consumer<ClassPrepareEvent>> prepares = newMultiMap(); private final Multimap<String, Consumer<ClassUnloadEvent>> unloads = newMultiMap(); private final Multimap<Location, Consumer<BreakpointEvent>> breakpoints = newMultiMap(); private final Multimap<ThreadReference, Consumer<StepEvent>> steps = newMultiMap(); private final Set<Consumer<ExceptionEvent>> exceptions = new HashSet<>(); private final Set<Consumer<MethodEntryEvent>> methodEntries = new HashSet<>(); private final Set<Consumer<MethodExitEvent>> methodExits = new HashSet<>(); private final Set<Consumer<MonitorWaitEvent>> monitorWaits = new HashSet<>(); private final Set<Consumer<MonitorWaitedEvent>> monitorWaiteds = new HashSet<>(); private final Set<Consumer<MonitorContendedEnterEvent>> monitorContendEnters = new HashSet<>(); private final Set<Consumer<MonitorContendedEnteredEvent>> monitorContendEntereds = new HashSet<>(); private final Set<Consumer<AccessWatchpointEvent>> watchpointAccesses = new HashSet<>(); private final Set<Consumer<ModificationWatchpointEvent>> watchpointModifies = new HashSet<>(); private final Set<Consumer<ThreadStartEvent>> threadStarts = new HashSet<>(); private final Set<Consumer<ThreadDeathEvent>> threadDeaths = new 
HashSet<>(); private final Set<Consumer<VMStartEvent>> vmStarts = new HashSet<>(); private final Set<Consumer<VMDeathEvent>> vmDeaths = new HashSet<>(); private final Set<Consumer<VMDisconnectEvent>> vmDisconnects = new HashSet<>(); private final VirtualMachine vm; private PrintStream out; /** * @param vm * Virtual machine to wrap. */ private VMWrap(VirtualMachine vm) { this.vm = vm; setupEventConsumers(); } /** * Assuming the current process is running as a debugee, connects to the current process. * * @return Wrapper for the current running context. * * @throws IOException * Thrown if connecting to the current process failed. */ public static VMWrap current() throws IOException { // Get pid of self process String name = ManagementFactory.getRuntimeMXBean().getName(); String pid = name.substring(0, name.indexOf('@')); return process(pid); } /** * Assuming the given process is running as a debugee, connects to the current process. * * @param pid Process id. * * @return Wrapper for the given running context. * * @throws IOException * Thrown if connecting to the given process failed. */ public static VMWrap process(String pid) throws IOException { ProcessAttachingConnector connector = new ProcessAttachingConnector(); Map<String, ? extends Connector.Argument> args = connector.defaultArguments(); args.get("pid").setValue(pid); args.get("timeout").setValue(String.valueOf(CONNECTOR_TIMEOUT)); // Try to connect try { return new VMWrap(connector.attach(args)); } catch(IllegalConnectorArgumentsException ex) { throw new IOException(ex); } } /** * Connect to an already created debugee listening on the given port. * * @param port * Port to connect on. * @param address * Address to connect to. Use {@code null} for localhost. * * @return Wrapper for the given running context. * * @throws IOException * Thrown if connecting to the given process failed. 
*/ public static VMWrap connect(String port, String address) throws IOException { SocketAttachingConnector connector = new SocketAttachingConnector(); Map<String, ? extends Connector.Argument> args = connector.defaultArguments(); args.get("timeout").setValue(String.valueOf(CONNECTOR_TIMEOUT)); args.get("port").setValue(port); if (address != null) args.get("localAddress").setValue(address); try { return new VMWrap(connector.attach(args)); } catch(IllegalConnectorArgumentsException ex) { throw new IOException(ex); } } /** * Start a process in debug mode and connect to it. * * @param main * Name of class containing the main method. * @param options * Launch arguments such as the classpath. * @param suspend * {@code true} to start the VM in a paused state. * * @return Wrapper for the newly created running context. * * @throws IOException * Thrown if connecting to the given process failed. */ public static VMWrap launching(String main, String options, boolean suspend) throws IOException { VirtualMachineManager vmm = Bootstrap.virtualMachineManager(); LaunchingConnector connector = vmm.defaultConnector(); Map<String, ? extends Connector.Argument> args = connector.defaultArguments(); args.get("options").setValue(options); args.get("suspend").setValue(String.valueOf(suspend)); args.get("main").setValue(main); try { return new VMWrap(connector.launch(args)); } catch(VMStartException | IllegalConnectorArgumentsException ex) { throw new IOException(ex); } } /** * @param backing * Resource to defer to. * * @return Workspace resource with the current vm link. */ public DebuggerResource toResource(JavaResource backing) { return new DebuggerResource(this, backing); } /** * @return The targeted VM. */ public VirtualMachine getTargetVM() { return vm; } /** * @param name * Quantified class name. * @param code * New bytecode for the class. * * @return {@code true} if redefinition succeeded. {@code false} if redefinition is not * supported. 
* * @throws JdiRedefineException * When redefinition failed for any of the following reasons:<ul> * <li>The given name has not been loaded by the target VM</li> * <li>A subfeature of redefinition was not supported <i>(Changing class schema for * example)</i></li> * <li>Bytecode does not pass the verifier</li> * <li>Bytecode uses an unsupported class file version</li> * <li>Bytecode is not a valid class</li> * <li>Bytecode does not represent the class given by the quantified name</li> * <li>Bytecode creates a circular inheritance hierarchy</li> * </ul> */ public boolean redefine(String name, byte[] code) throws JdiRedefineException { if(!vm.canRedefineClasses() || !vm.canBeModified()) return false; ClassType type = getType(name); if (type == null) throw new JdiRedefineException("Given class name has not been loaded by the target VM"); Map<ReferenceType, byte[]> map = Collections.singletonMap(type, code); try { vm.redefineClasses(map); } catch(UnsupportedOperationException ex) { throw new JdiRedefineException(ex, "Redefinition unsupported, [AddMethods:" + vm.canAddMethod() + ", unrestricted:" + vm.canUnrestrictedlyRedefineClasses() + "]"); } catch(NoClassDefFoundError ex) { throw new JdiRedefineException(ex, "Given bytecode does not match class being redefined"); } catch(UnsupportedClassVersionError ex) { throw new JdiRedefineException(ex, "Given bytecode has uses unsupported class file version"); } catch(VerifyError ex) { throw new JdiRedefineException(ex, "Given bytecode does not pass verification"); } catch(ClassFormatError ex) { throw new JdiRedefineException(ex, "Given bytecode is not a valid class"); } catch(ClassCircularityError ex) { throw new JdiRedefineException(ex, "Given bytecode has a circular hierarchy"); } return true; } /** * @return Optional of the main thread. */ public Optional<ThreadReference> getMainThread() { return vm.allThreads().stream().filter(t -> t.name().equals("main")).findFirst(); } /** * Invoke a static method on the remote VM. 
* * @param owner * Quantified class name. * @param name * Method name. * @param desc * Method descriptor. * @param args * Arguments to pass. Supported types are: <ul> * <li>Primitives <i>(int, float, etc.)</i></li> * <li>String</li> * <li>{@link Value}</li> * </ul> * * @return JDI mirrored value. * * @throws JdiInvokeException * When invoke failed for any of the following reasons:<ul> * <li>Invalid owner type <i>(Including if owner class is not loaded)</i></li> * <li>Invalid method type</li> * <li>Method not found for owner type</li> * <li>Method cannot be invoked on the main thread</li> * </ul> */ public Value invokeStatic(String owner, String name, String desc, Object... args) throws JdiInvokeException { // Get references needed for the invoke ClassType c = getType(owner); if (c == null) throw new JdiInvokeException("Given class name has not been loaded by the target VM"); Optional<ThreadReference> thread = getMainThread(); if (!thread.isPresent()) throw new JdiInvokeException("No main thread found"); Method method = c.concreteMethodByName(name, desc); // Create mirror values of args List<Value> argMirros = new ArrayList<>(); for (Object arg : args) { if (arg instanceof String) argMirros.add(vm.mirrorOf((String) arg)); else if (arg instanceof Integer) argMirros.add(vm.mirrorOf((int) arg)); else if (arg instanceof Boolean) argMirros.add(vm.mirrorOf((boolean) arg)); else if (arg instanceof Long) argMirros.add(vm.mirrorOf((long) arg)); else if (arg instanceof Float) argMirros.add(vm.mirrorOf((float) arg)); else if (arg instanceof Double) argMirros.add(vm.mirrorOf((double) arg)); else if (arg instanceof Byte) argMirros.add(vm.mirrorOf((byte) arg)); else if (arg instanceof Character) argMirros.add(vm.mirrorOf((char) arg)); else if (arg instanceof Short) argMirros.add(vm.mirrorOf((short) arg)); else if (arg instanceof Value) argMirros.add((Value) arg); else throw new JdiInvokeException("Invalid type given in args: " + arg.getClass().getName()); } // Attempt to invoke 
try { return c.invokeMethod(thread.get(), method, argMirros, 0); } catch(InvalidTypeException ex) { throw new JdiInvokeException(ex, "Given type was invalid"); } catch(ClassNotLoadedException ex) { throw new JdiInvokeException(ex, "Given owner was not loaded"); } catch(IncompatibleThreadStateException ex) { throw new JdiInvokeException(ex, "Cannot invoke method on main thread"); } catch(InvocationException ex) { throw new JdiInvokeException(ex, "Generic invoke error"); } } /** * @param name * Quantified class name. * * @return Reference type for the class. {@code null} if the class is not loaded in the target * vm. */ public ClassType getType(String name) { List<ReferenceType> matches = vm.classesByName(name); if (matches.isEmpty()) return null; return (ClassType) matches.get(0); } /** * Register an action for when classes are prepared. * * @param name * Quantified name of the class. {@code null} to accept any class. * @param action * Action to run. * * @return The request. */ public ClassPrepareRequest prepare(String name, Consumer<ClassPrepareEvent> action) { ClassPrepareRequest request = vm.eventRequestManager().createClassPrepareRequest(); if (name != null) request.addClassFilter(name); prepares.put(name, action); return request; } /** * Register an action &amp; breakpoint for when a location is hit. * * @param location * Location to add breakpoint to. * @param action * Action to run. * * @return The request. */ public BreakpointRequest breakpoint(Location location, Consumer<BreakpointEvent> action) { BreakpointRequest request = vm.eventRequestManager().createBreakpointRequest(location); breakpoints.put(location, action); return request; } /** * Register a step on the given thread. * * @param thread * The thread to intercept steps in. * @param size * Step size. <ul> * <li>{@link StepRequest#STEP_LINE} for stepping to the next line. 
* If no debug info is givem, this defaults to {@link StepRequest#STEP_MIN}</li> * <li>{@link StepRequest#STEP_MIN} for <i>any</i> code index change.</li> * </ul> * @param depth * Step depth. * <ul> * <li>{@link StepRequest#STEP_INTO} for stepping into new frames.</li> * <li>{@link StepRequest#STEP_OVER} for stepping over new frames.</li> * <li>{@link StepRequest#STEP_OUT} for stepping out of the current frame.</li> * </ul> * @param action * Action to run. * * @return The request. */ public StepRequest step(ThreadReference thread, int size, int depth, Consumer<StepEvent> action) { StepRequest request = vm.eventRequestManager().createStepRequest(thread, size, depth); steps.put(thread, action); return request; } /** * Register an action for entering methods. * * @param action * Action to run. * * @return The request. */ public MethodEntryRequest methodEntry(Consumer<MethodEntryEvent> action) { MethodEntryRequest request = vm.eventRequestManager().createMethodEntryRequest(); methodEntries.add(action); return request; } /** * Register an action for exiting methods. * * @param action * Action to run. * * @return The request. */ public MethodExitRequest methodExit(Consumer<MethodExitEvent> action) { MethodExitRequest request = vm.eventRequestManager().createMethodExitRequest(); methodExits.add(action); return request; } /** * Register an action for thrown exceptions. * * @param type * The type of exception to catch. {@code null} for any time. * @param caught * Call actions on caught exceptions. * @param uncaught * Call actions on uncaught exceptions. * @param action * Action to run. * * @return The request. */ public ExceptionRequest exception(ReferenceType type, boolean caught, boolean uncaught, Consumer<ExceptionEvent> action) { ExceptionRequest request = vm.eventRequestManager() .createExceptionRequest(type, caught, uncaught); exceptions.add(action); return request; } // TODO: Registering methods for the events that don't have 'em /** * Set IO handlers. 
* * @param out * Stream to send VM's output to. May be {@code null}. */ public void setup(PrintStream out) { this.out = out; } /** * @return Debugged process. */ public Process getProcess() { return vm.process(); } /** * Begin handling vm events. */ public void start() { // Used to keep track of if we're still attached to the VM. boolean[] running = {true}; try { // Start redirecting process output new Thread(() -> { try { InputStream pOutput = vm.process().getInputStream(); InputStream pErr = vm.process().getErrorStream(); byte[] buffer = new byte[4096]; while(running[0] || vm.process().isAlive()) { // Handle receiving output if(out != null) { int size = pOutput.available(); if(size > 0) { int n = pOutput.read(buffer, 0, Math.min(size, buffer.length)); out.println(new String(buffer, 0, n)); } size = pErr.available(); if(size > 0) { int n = pErr.read(buffer, 0, Math.min(size, buffer.length)); out.println(new String(buffer, 0, n)); } } Thread.sleep(PRINT_THREAD_DELAY); } } catch(InterruptedException | IOException ex) { error(ex, "Exception occurred while processing VM IPC"); } }).start(); // Handle vm events eventLoop(); } catch(VMDisconnectedException ex) { // Expected // - Stop print redirect thread running[0] = false; } catch(InterruptedException ex) { error(ex, "Failed processing VM event queue"); } } private void eventLoop() throws VMDisconnectedException, InterruptedException { vm.resume(); EventSet eventSet = null; while((eventSet = vm.eventQueue().remove()) != null) { for(Event event : eventSet) { // Key is the first interface parent because they all are interface impls. Class<?> key = event.getClass(); if (key.getInterfaces().length > 0) key = key.getInterfaces()[0]; // Run consumers if any found. 
Consumer<Event> consumer = eventConsumers.get(key); if(consumer != null) consumer.accept(event); // continue VM operations vm.resume(); } } } private void setupEventConsumers() { eventConsumers.put(ClassPrepareEvent.class, (event) -> { ClassPrepareEvent prepare = (ClassPrepareEvent) event; String key = prepare.referenceType().name(); prepares.get(key).forEach(consumer -> consumer.accept(prepare)); }); eventConsumers.put(ClassUnloadEvent.class, (event) -> { ClassUnloadEvent unload = (ClassUnloadEvent) event; String key = unload.className(); unloads.get(key).forEach(consumer -> consumer.accept(unload)); }); eventConsumers.put(BreakpointEvent.class, (event) -> { BreakpointEvent breakpoint = (BreakpointEvent) event; Location key = breakpoint.location(); breakpoints.get(key).forEach(consumer -> consumer.accept(breakpoint)); }); eventConsumers.put(StepEvent.class, (event) -> { StepEvent step = (StepEvent) event; ThreadReference key = step.thread(); steps.get(key).forEach(consumer -> consumer.accept(step)); }); eventConsumers.put(MethodEntryEvent.class, (event) -> { MethodEntryEvent entry = (MethodEntryEvent) event; methodEntries.forEach(consumer -> consumer.accept(entry)); }); eventConsumers.put(MethodExitEvent.class, (event) -> { MethodExitEvent exit = (MethodExitEvent) event; methodExits.forEach(consumer -> consumer.accept(exit)); }); eventConsumers.put(MonitorWaitEvent.class, (event) -> { MonitorWaitEvent wait = (MonitorWaitEvent) event; monitorWaits.forEach(consumer -> consumer.accept(wait)); }); eventConsumers.put(MonitorWaitedEvent.class, (event) -> { MonitorWaitedEvent wait = (MonitorWaitedEvent) event; monitorWaiteds.forEach(consumer -> consumer.accept(wait)); }); eventConsumers.put(MonitorContendedEnterEvent.class, (event) -> { MonitorContendedEnterEvent enter = (MonitorContendedEnterEvent) event; monitorContendEnters.forEach(consumer -> consumer.accept(enter)); }); eventConsumers.put(MonitorContendedEnteredEvent.class, (event) -> { MonitorContendedEnteredEvent 
entered = (MonitorContendedEnteredEvent) event; monitorContendEntereds.forEach(consumer -> consumer.accept(entered)); }); eventConsumers.put(AccessWatchpointEvent.class, (event) -> { AccessWatchpointEvent acc = (AccessWatchpointEvent) event; watchpointAccesses.forEach(consumer -> consumer.accept(acc)); }); eventConsumers.put(ModificationWatchpointEvent.class, (event) -> { ModificationWatchpointEvent modify = (ModificationWatchpointEvent) event; watchpointModifies.forEach(consumer -> consumer.accept(modify)); }); eventConsumers.put(ExceptionEvent.class, (event) -> { ExceptionEvent exc = (ExceptionEvent) event; exceptions.forEach(consumer -> consumer.accept(exc)); }); eventConsumers.put(ThreadStartEvent.class, (event) -> { ThreadStartEvent start = (ThreadStartEvent) event; threadStarts.forEach(consumer -> consumer.accept(start)); }); eventConsumers.put(ThreadDeathEvent.class, (event) -> { ThreadDeathEvent death = (ThreadDeathEvent) event; threadDeaths.forEach(consumer -> consumer.accept(death)); }); eventConsumers.put(VMStartEvent.class, (event) -> { VMStartEvent start = (VMStartEvent) event; vmStarts.forEach(consumer -> consumer.accept(start)); }); eventConsumers.put(VMDisconnectEvent.class, (event) -> { VMDisconnectEvent disconnect = (VMDisconnectEvent) event; vmDisconnects.forEach(consumer -> consumer.accept(disconnect)); }); eventConsumers.put(VMDeathEvent.class, (event) -> { VMDeathEvent death = (VMDeathEvent) event; vmDeaths.forEach(consumer -> consumer.accept(death)); }); } private static <K, V> Multimap<K, V> newMultiMap() { return MultimapBuilder.hashKeys().arrayListValues().build(); } }
Resolve java 9+ compiling errors
src/main/java/me/coley/recaf/debug/VMWrap.java
Resolve java 9+ compiling errors
Java
mit
4a98434cd844895e8e4f1a88c707cb732414b6b8
0
CrypDist/CrypDist,CrypDist/CrypDist
package GUI;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.io.File;

/**
 * Screen that lets the user pick a local file, enter a short summary, and
 * upload the file through the {@link ScreenManager}. The upload runs on a
 * background thread; all Swing mutations are marshalled back onto the EDT
 * via {@link SwingUtilities#invokeLater}.
 *
 * Created by gizem on 06.04.2017.
 */
public class DataUploadScreen extends JPanel implements ActionListener {

    // Placeholder strings shown in gray while the fields are untouched.
    private static final String PATH_PLACEHOLDER = "Path";
    private static final String SUMMARY_PLACEHOLDER = "Data summary";

    JLabel label;
    GlossyButton upload;
    GlossyButton back;
    GlossyButton cancel;
    GlossyButton browse;
    JTextField pathField;
    JTextField dataSummary;
    JProgressBar progressBar;
    ScreenManager controller;

    /**
     * Builds the upload form: path and summary fields, browse/upload/back/
     * cancel buttons, and an initially hidden indeterminate progress bar.
     *
     * @param controller screen manager used for navigation and uploads
     */
    public DataUploadScreen(ScreenManager controller) {
        this.controller = controller;
        setSize(new Dimension(600, 300));
        setBackground(Color.white);

        label = new JLabel("Select the file to be added:");
        upload = new GlossyButton("Upload");
        back = new GlossyButton("Back");
        cancel = new GlossyButton("Cancel");
        browse = new GlossyButton("Browse");
        pathField = new JTextField(30);
        dataSummary = new JTextField(30);
        progressBar = new JProgressBar();

        upload.addActionListener(this);
        back.addActionListener(this);
        cancel.addActionListener(this);
        browse.addActionListener(this);

        cancel.setEnabled(false);
        progressBar.setVisible(false);

        Border border = BorderFactory.createLineBorder(Color.GRAY, 1);
        label.setBorder(new EmptyBorder(10, 40, 10, 10));

        JPanel dataPath = new JPanel(new GridLayout(2, 0, 0, 3));
        // Font.PLAIN is the intended style constant here. The previous code
        // passed Font.LAYOUT_LEFT_TO_RIGHT, a text-layout flag that only
        // worked because its value happens to equal Font.PLAIN (0).
        pathField.setFont(new Font("Arial", Font.PLAIN, 14));
        dataSummary.setFont(new Font("Arial", Font.PLAIN, 14));
        pathField.setBorder(border);
        installPlaceholder(pathField, PATH_PLACEHOLDER);
        installPlaceholder(dataSummary, SUMMARY_PLACEHOLDER);

        // Create the layout structure
        GridLayout mainLayout = new GridLayout(5, 0);
        this.setLayout(mainLayout);

        JPanel bottom = new JPanel();
        bottom.setBackground(Color.white);
        bottom.add(back);
        bottom.add(cancel);

        JPanel browsePanel = new JPanel(new FlowLayout(FlowLayout.LEFT, 20, 0));
        browsePanel.setBackground(Color.white);
        dataPath.add(pathField);
        dataPath.add(dataSummary);
        browsePanel.add(dataPath);
        browsePanel.add(browse);
        browsePanel.setBorder(BorderFactory.createEmptyBorder(10, 30, 10, 10));

        JPanel layer3 = new JPanel();
        layer3.setBackground(Color.white);
        layer3.add(upload);

        JPanel layer4 = new JPanel();
        layer4.setBackground(Color.white);
        progressBar.setBackground(Color.white);
        layer4.add(progressBar);

        add(label);
        add(browsePanel);
        add(layer3);
        add(layer4);
        add(bottom);
        repaint();
        setVisible(true);
    }

    /**
     * Shows {@code placeholder} in gray while {@code field} is empty and
     * clears it when the field gains focus.
     */
    private static void installPlaceholder(final JTextField field, final String placeholder) {
        field.setForeground(Color.GRAY);
        field.setText(placeholder);
        field.addFocusListener(new FocusListener() {
            @Override
            public void focusGained(FocusEvent e) {
                if (field.getText().equals(placeholder)) {
                    field.setText("");
                    field.setForeground(Color.BLACK);
                }
            }

            @Override
            public void focusLost(FocusEvent e) {
                if (field.getText().isEmpty()) {
                    field.setForeground(Color.GRAY);
                    field.setText(placeholder);
                }
            }
        });
    }

    /** Disables input controls and shows the indeterminate progress bar. */
    private void beginUploadUi() {
        pathField.setEditable(false);
        browse.setEnabled(false);
        back.setEnabled(false);
        cancel.setEnabled(true);
        progressBar.setVisible(true);
        progressBar.setIndeterminate(true);
    }

    /** Re-enables input controls and hides the progress bar. */
    private void endUploadUi() {
        pathField.setEditable(true);
        browse.setEnabled(true);
        back.setEnabled(true);
        cancel.setEnabled(false);
        progressBar.setVisible(false);
        progressBar.setIndeterminate(false);
    }

    /** Validates the form and, if valid, runs the upload on a worker thread. */
    private void onUpload() {
        final String path = pathField.getText();
        final String summary = dataSummary.getText();
        // Untouched placeholder text is treated the same as empty input;
        // previously "Data summary" could be uploaded as the real summary.
        if (path.isEmpty() || path.equals(PATH_PLACEHOLDER)) {
            JOptionPane.showMessageDialog(this, "Please enter a valid path!",
                    "Warning", JOptionPane.WARNING_MESSAGE);
            return;
        }
        if (summary.isEmpty() || summary.equals(SUMMARY_PLACEHOLDER)) {
            JOptionPane.showMessageDialog(DataUploadScreen.this, "Please enter a data summary!",
                    "Warning", JOptionPane.WARNING_MESSAGE);
            return;
        }
        if (!controller.isPathExist(path)) {
            JOptionPane.showMessageDialog(this, "Please enter a valid path!",
                    "Warning", JOptionPane.WARNING_MESSAGE);
            return;
        }
        beginUploadUi();
        Runnable uploadTask = () -> {
            try {
                controller.uploadData(path, summary);
                // Clear the form only after a successful upload.
                SwingUtilities.invokeLater(() -> {
                    pathField.setText("");
                    dataSummary.setText("");
                });
            } catch (InterruptedException e1) {
                e1.printStackTrace();
            } finally {
                // Swing components must only be touched on the EDT, and the
                // form must be unlocked even when the upload fails
                // (previously it stayed disabled forever on failure).
                SwingUtilities.invokeLater(() -> {
                    endUploadUi();
                    repaint();
                });
            }
        };
        new Thread(uploadTask).start();
    }

    /**
     * Dispatches button clicks: upload (validate + background upload), back
     * (return to the main screen), cancel (restore the form) and browse
     * (file chooser).
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        if (e.getSource() == upload) {
            onUpload();
        } else if (e.getSource() == back) {
            controller.setCurrentView(new MainScreen(controller));
            controller.setSize(new Dimension(1000, 600));
        } else if (e.getSource() == cancel) {
            // Restores the form; note the worker thread itself is not
            // interrupted here (matches previous behaviour).
            endUploadUi();
            pathField.setText("");
        } else {
            // Browse
            final JFileChooser fc = new JFileChooser();
            int returnVal = fc.showOpenDialog(this);
            if (returnVal == JFileChooser.APPROVE_OPTION) {
                File file = fc.getSelectedFile();
                pathField.setText(file.getAbsolutePath());
            }
        }
        repaint();
    }
}
Client/src/main/java/GUI/DataUploadScreen.java
package GUI;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.io.File;

/**
 * Created by gizem on 06.04.2017.
 */
// Upload form: a path field, a summary field, browse/upload buttons and an
// (initially hidden) indeterminate progress bar driven by a worker thread.
public class DataUploadScreen extends JPanel implements ActionListener{
    // Form widgets; GlossyButton and ScreenManager are project-local types.
    JLabel label;
    GlossyButton upload;
    GlossyButton back;
    GlossyButton cancel;
    GlossyButton browse;
    JTextField pathField;
    JTextField dataSummary;
    JProgressBar progressBar;
    ScreenManager controller;

    // Builds the form and wires all button listeners to this panel.
    public DataUploadScreen(ScreenManager controller) {
        this.controller = controller;
        setSize((new Dimension(600,300)));
        setBackground(Color.white);
        label = new JLabel("Select the file to be added:");
        upload = new GlossyButton("Upload");
        back = new GlossyButton("Back");
        cancel = new GlossyButton("Cancel");
        browse = new GlossyButton("Browse");
        pathField = new JTextField(30);
        dataSummary = new JTextField(30);
        progressBar = new JProgressBar();
        upload.addActionListener(this);
        back.addActionListener(this);
        cancel.addActionListener(this);
        browse.addActionListener(this);
        cancel.setEnabled(false);
        progressBar.setVisible(false);
        Border border = BorderFactory.createLineBorder(Color.GRAY, 1);
        label.setBorder(new EmptyBorder(10, 40, 10, 10));
        // NOTE(review): Font.LAYOUT_LEFT_TO_RIGHT is a text-layout flag, not
        // a style; it only works here because its value equals Font.PLAIN (0).
        pathField.setFont(new Font("Arial", Font.LAYOUT_LEFT_TO_RIGHT,14));
        dataSummary.setFont(new Font("Arial", Font.LAYOUT_LEFT_TO_RIGHT,14));
        pathField.setBorder(border);
        // Gray "Path" placeholder handling for the path field.
        pathField.setForeground(Color.GRAY);
        pathField.setText("Path");
        pathField.addFocusListener(new FocusListener() {
            @Override
            public void focusGained(FocusEvent e) {
                if (pathField.getText().equals("Path")) {
                    pathField.setText("");
                    pathField.setForeground(Color.BLACK);
                }
            }
            @Override
            public void focusLost(FocusEvent e) {
                if (pathField.getText().isEmpty()) {
                    pathField.setForeground(Color.GRAY);
                    pathField.setText("Path");
                }
            }
        });
        // Gray "Data summary" placeholder handling for the summary field.
        dataSummary.setForeground(Color.GRAY);
        dataSummary.setText("Data summary");
        dataSummary.addFocusListener(new FocusListener() {
            @Override
            public void focusGained(FocusEvent e) {
                if (dataSummary.getText().equals("Data summary")) {
                    dataSummary.setText("");
                    dataSummary.setForeground(Color.BLACK);
                }
            }
            @Override
            public void focusLost(FocusEvent e) {
                if (dataSummary.getText().isEmpty()) {
                    dataSummary.setForeground(Color.GRAY);
                    dataSummary.setText("Data summary");
                }
            }
        });
        // Create the layout structure
        GridLayout mainLayout = new GridLayout(5,0);
        this.setLayout(mainLayout);
        JPanel bottom = new JPanel();
        bottom.setBackground(Color.white);
        bottom.add(back);
        bottom.add(cancel);
        JPanel browsePanel = new JPanel(new FlowLayout(FlowLayout.LEFT,20,0));
        browsePanel.setBackground(Color.white);
        browsePanel.add(pathField);
        browsePanel.add(browse);
        browsePanel.add(dataSummary);
        browsePanel.setBorder(BorderFactory.createEmptyBorder(10, 30, 10, 10));
        JPanel layer3 = new JPanel();
        layer3.setBackground(Color.white);
        layer3.add(upload);
        JPanel layer4 = new JPanel();
        layer4.setBackground(Color.white);
        progressBar.setBackground(Color.white);
        layer4.add(progressBar);
        add(label);
        add(browsePanel);
        add(layer3);
        add(layer4);
        add(bottom);
        repaint();
        setVisible(true);
    }

    // Dispatches button clicks: upload (validate + background upload), back
    // (return to main screen), cancel (restore form), browse (file chooser).
    @Override
    public void actionPerformed(ActionEvent e) {
        if (e.getSource() == upload) {
            {
                if(pathField.getText().equals(""))
                    JOptionPane.showMessageDialog(this, "Please enter a valid path!", "Warning", JOptionPane.WARNING_MESSAGE);
                else {
                    if (dataSummary.getText().isEmpty()) {
                        JOptionPane.showMessageDialog(DataUploadScreen.this, "Please enter a data summary!", "Warning", JOptionPane.WARNING_MESSAGE);
                        return;
                    }
                    else if(controller.isPathExist(pathField.getText())) {
                        // Lock the form while the upload runs.
                        pathField.setEditable(false);
                        browse.setEnabled(false);
                        back.setEnabled(false);
                        cancel.setEnabled(true);
                        progressBar.setVisible(true);
                        progressBar.setIndeterminate(true);
                        // NOTE(review): this worker thread mutates Swing
                        // components directly; Swing requires EDT access.
                        // Also, the form is never unlocked if uploadData
                        // throws InterruptedException — confirm and fix.
                        Runnable myrunnable = () -> {
                            try {
                                String summary = dataSummary.getText();
                                controller.uploadData(pathField.getText(), summary);
                                pathField.setEditable(true);
                                browse.setEnabled(true);
                                back.setEnabled(true);
                                cancel.setEnabled(false);
                                progressBar.setVisible(false);
                                progressBar.setIndeterminate(false);
                                pathField.setText("");
                                dataSummary.setText("");
                            } catch (InterruptedException e1) {
                                e1.printStackTrace();
                            }
                        };
                        new Thread(myrunnable).start();
                    }
                    else {
                        JOptionPane.showMessageDialog(this, "Please enter a valid path!", "Warning", JOptionPane.WARNING_MESSAGE);
                    }
                }
            }
        }
        else if(e.getSource() == back) {
            controller.setCurrentView(new MainScreen(controller));
            controller.setSize((new Dimension(1000,600)));
        }
        else if(e.getSource() == cancel) {
            // Restores the form; the upload thread itself is not interrupted.
            progressBar.setVisible(false);
            progressBar.setIndeterminate(false);
            cancel.setEnabled(false);
            pathField.setEditable(true);
            browse.setEnabled(true);
            back.setEnabled(true);
            pathField.setText("");
        }
        else {
            // Browse
            final JFileChooser fc = new JFileChooser();
            int returnVal = fc.showOpenDialog(this);
            if (returnVal == JFileChooser.APPROVE_OPTION) {
                File file = fc.getSelectedFile();
                pathField.setText(file.getAbsolutePath());
            }
        }
        repaint();
    }
}
upload alignment fix
Client/src/main/java/GUI/DataUploadScreen.java
upload alignment fix
Java
mit
2caf59771deb613b19bbc4c0a01fe9b62e64440f
0
dxiao/PPBunnies,dxiao/PPBunnies,dxiao/PPBunnies
package com.gravity.map;

import java.util.List;
import java.util.Set;

import org.newdawn.slick.geom.Vector2f;

import com.google.common.collect.Sets;

import com.gravity.fauna.Player;
import com.gravity.root.GameplayControl;

/**
 * A checkpoint that fires once every player has reached it. When the last
 * player arrives, the controller is given this checkpoint's restart
 * positions to use for subsequent respawns. It never fires twice.
 */
public class Checkpoint {

    // One restart position per player; its size doubles as the required
    // player count.
    private final List<Vector2f> restartPositions;
    private final GameplayControl controller;
    // Latched to true once the checkpoint has fired.
    private boolean passed;
    // Players that have reached this checkpoint so far.
    private final Set<Player> playersPassed;

    public Checkpoint(GameplayControl controller, List<Vector2f> restartPositions) {
        this.controller = controller;
        this.restartPositions = restartPositions;
        passed = false;
        playersPassed = Sets.newHashSet();
    }

    /**
     * Records that {@code player} reached this checkpoint and fires it
     * (once) when all players have arrived.
     */
    public void playerPassed(Player player) {
        if (passed) {
            // Already fired: previously players kept being added to the set
            // on every call after the checkpoint triggered; bail out early.
            return;
        }
        playersPassed.add(player);
        if (playersPassed.size() == restartPositions.size()) {
            System.out.println("Checkpoint passed.");
            passed = true;
            controller.newStartPositions(restartPositions);
        }
    }
}
Platformer/src/com/gravity/map/Checkpoint.java
package com.gravity.map;

import java.util.List;
import java.util.Set;

import org.newdawn.slick.geom.Vector2f;

import com.google.common.collect.Sets;

import com.gravity.fauna.Player;
import com.gravity.root.GameplayControl;

// Checkpoint that fires once every player has reached it; when the last
// player arrives the controller is given this checkpoint's restart
// positions for subsequent respawns.
public class Checkpoint {

    // One restart position per player; its size doubles as the player count.
    private final List<Vector2f> restartPositions;
    private final GameplayControl controller;
    // Latched to true once the checkpoint has fired; it never fires twice.
    private boolean passed;
    // Players that have reached this checkpoint so far.
    private final Set<Player> playersPassed;

    public Checkpoint(GameplayControl controller, List<Vector2f> restartPositions) {
        this.controller = controller;
        this.restartPositions = restartPositions;
        passed = false;
        playersPassed = Sets.newHashSet();
    }

    // Records that player reached the checkpoint; fires once all players have.
    public void playerPassed(Player player) {
        // Debug trace for every arrival while the checkpoint is still armed.
        if (!passed)
            System.out.println("Player passed: " + player);
        playersPassed.add(player);
        if (!passed && playersPassed.size() == restartPositions.size()) {
            System.out.println("Checkpoint passed.");
            passed = true;
            controller.newStartPositions(restartPositions);
        }
    }
}
Removed stupid debug message.
Platformer/src/com/gravity/map/Checkpoint.java
Removed stupid debug message.
Java
mit
5db6cf1be6fab3ac502b846eb54e3c9bf5465625
0
muojp/smsproxy
package jp.muo.smsproxy; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.SharedPreferences; import android.os.BatteryManager; import android.util.Log; public class BatteryLevelObserver extends BroadcastReceiver { private static final String BATTERY_PREFS_KEY = "bat_level"; private static final String PREFS_IS_OKAY = "is_okay"; private static final int BAT_LOW = 15; private static final int BAT_OKAY = 20; @Override public void onReceive(Context context, Intent intent) { final String action = intent.getAction(); if (action.equals(Intent.ACTION_BATTERY_LOW)) { BatteryLevelObserver.updateStatus(context); } else if (action.equals(Intent.ACTION_BATTERY_OKAY)) { BatteryLevelObserver.updateStatus(context); } } public static void updateStatus(Context context) { BroadcastReceiver batReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { context.unregisterReceiver(this); SharedPreferences prefs = context.getSharedPreferences(BATTERY_PREFS_KEY, Context.MODE_PRIVATE); int level = intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0); boolean isLowerTrigger = level <= BAT_LOW; boolean isOkayTrigger = level >= BAT_OKAY; intent.getIntExtra(BatteryManager.EXTRA_STATUS, 0); boolean isPlugged = intent.getIntExtra(BatteryManager.EXTRA_PLUGGED, 0) != 0; boolean isOkayInPrefs = prefs.getBoolean(PREFS_IS_OKAY, true); boolean isBatteryLevelOkay = isOkayInPrefs; SharedPreferences.Editor editor = prefs.edit(); if (isOkayInPrefs) { if (isLowerTrigger && !isPlugged) { SmsProxyManager mgr = new SmsProxyManager(context); Log.d(SmsProxyManager.TAG, String.format( "sending battery level notification(level: %d%%, isPlugged: %s", level, isPlugged ? 
"true" : "false")); if (mgr.isEnabled()) { mgr.send(SmsProxyManager.Mode.CALL, context.getString(R.string.sms_bat)); } isBatteryLevelOkay = false; } } else { if (isOkayTrigger) { isBatteryLevelOkay = true; } } Log.d(SmsProxyManager.TAG, String.format("isBatteryLevelOkay: %s", isBatteryLevelOkay ? "true" : "false")); if (isOkayInPrefs != isBatteryLevelOkay) { editor.putBoolean(PREFS_IS_OKAY, isBatteryLevelOkay); editor.commit(); } } }; context.registerReceiver(batReceiver, new IntentFilter(Intent.ACTION_BATTERY_CHANGED)); } }
src/jp/muo/smsproxy/BatteryLevelObserver.java
package jp.muo.smsproxy;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.os.BatteryManager;
import android.util.Log;
import android.widget.Toast;

// Watches battery broadcasts and sends a low-battery SMS (via
// SmsProxyManager) the first time the level drops to BAT_LOW while the
// device is unplugged; a SharedPreferences latch suppresses repeats until
// the level recovers to BAT_OKAY.
// NOTE(review): the Resources and Toast imports are unused in this file.
public class BatteryLevelObserver extends BroadcastReceiver {
    // Preference file name for the latch state.
    private static final String BATTERY_PREFS_KEY = "bat_level";
    // Boolean pref: true while the battery is considered "okay".
    private static final String PREFS_IS_OKAY = "is_okay";
    // Level at or below which a notification is sent (assumes EXTRA_LEVEL
    // is a percentage — TODO confirm against EXTRA_SCALE on target devices).
    private static final int BAT_LOW = 15;
    // Level at or above which the latch is re-armed.
    private static final int BAT_OKAY = 20;

    @Override
    public void onReceive(Context context, Intent intent) {
        final String action = intent.getAction();
        // Both system broadcasts cause a re-evaluation of the latch.
        if (action.equals(Intent.ACTION_BATTERY_LOW)) {
            BatteryLevelObserver.updateStatus(context);
        } else if (action.equals(Intent.ACTION_BATTERY_OKAY)) {
            BatteryLevelObserver.updateStatus(context);
        }
    }

    // Registers a one-shot receiver for the sticky ACTION_BATTERY_CHANGED
    // intent and updates the latch / sends the SMS from its onReceive.
    public static void updateStatus(Context context) {
        BroadcastReceiver batReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                // One-shot: only the current sticky battery intent is needed.
                context.unregisterReceiver(this);
                SharedPreferences prefs = context.getSharedPreferences(BATTERY_PREFS_KEY, Context.MODE_PRIVATE);
                int level = intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0);
                boolean isLowerTrigger = level <= BAT_LOW;
                boolean isOkayTrigger = level >= BAT_OKAY;
                // NOTE(review): this EXTRA_STATUS read discards its result —
                // dead statement.
                intent.getIntExtra(BatteryManager.EXTRA_STATUS, 0);
                boolean isPlugged = intent.getIntExtra(BatteryManager.EXTRA_PLUGGED, 0) != 0;
                boolean isOkayInPrefs = prefs.getBoolean(PREFS_IS_OKAY, true);
                boolean isBatteryLevelOkay = isOkayInPrefs;
                SharedPreferences.Editor editor = prefs.edit();
                if (isOkayInPrefs) {
                    if (isLowerTrigger && !isPlugged) {
                        // First drop below the threshold while unplugged:
                        // send the notification SMS and trip the latch.
                        SmsProxyManager mgr = new SmsProxyManager(context);
                        Log.d(SmsProxyManager.TAG, String.format(
                                "sending battery level notification(level: %d%%, isPlugged: %s",
                                level, isPlugged ? "true" : "false"));
                        if (mgr.isEnabled()) {
                            mgr.send(SmsProxyManager.Mode.CALL, context.getString(R.string.sms_bat));
                        }
                        isBatteryLevelOkay = false;
                    }
                } else {
                    if (isOkayTrigger) {
                        // Recovered: re-arm the latch.
                        isBatteryLevelOkay = true;
                    }
                }
                Log.d(SmsProxyManager.TAG, String.format("isBatteryLevelOkay: %s", isBatteryLevelOkay ? "true" : "false"));
                // Persist only on state change.
                if (isOkayInPrefs != isBatteryLevelOkay) {
                    editor.putBoolean(PREFS_IS_OKAY, isBatteryLevelOkay);
                    editor.commit();
                }
            }
        };
        // ACTION_BATTERY_CHANGED is sticky, so the receiver fires
        // immediately with the latest battery state.
        context.registerReceiver(batReceiver, new IntentFilter(Intent.ACTION_BATTERY_CHANGED));
    }
}
Code cleanup
src/jp/muo/smsproxy/BatteryLevelObserver.java
Code cleanup
Java
epl-1.0
036287a374f279da00d078807f00cce8887ac79e
0
jmchilton/galaxy-bootstrap,jmchilton/galaxy-bootstrap
package com.github.jmchilton.galaxybootstrap;

import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import com.google.common.io.InputSupplier;
import com.google.common.io.Resources;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Map;
import org.ini4j.Ini;
import org.ini4j.Profile.Section;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Accumulates Galaxy configuration (app/server ini properties, HTTP port,
 * optional prepopulated SQLite database, nested tool-shed layout) and
 * writes it into a Galaxy checkout via {@link #configureGalaxy(File)}.
 *
 * @author John Chilton
 */
@SuppressWarnings("deprecation")
public class GalaxyProperties {
  private static final Logger logger = LoggerFactory
      .getLogger(GalaxyProperties.class);
  /** Properties written to the [app:main] section of the ini file. */
  private final Map<String, String> appProperties = Maps.newHashMap();
  /** Properties written to the [server:main] section of the ini file. */
  private final Map<String, String> serverProperties = Maps.newHashMap();
  private int port = 8080;  // default
  private String galaxyURL = adjustGalaxyURL(port);
  private boolean configureNestedShedTools = false;
  /** Optional seed database copied over database/universe.sqlite. */
  private Optional<URL> database = Optional.absent();

  /** @return base URL for a Galaxy instance listening on {@code port}. */
  private static String adjustGalaxyURL(int port) {
    return "http://localhost:" + port + "/";
  }

  /** Sets a property in the [app:main] section. */
  public GalaxyProperties setAppProperty(final String name, final String value) {
    appProperties.put(name, value);
    return this;
  }

  /** Sets a property in the [server:main] section. */
  public GalaxyProperties setServerProperty(final String name, final String value) {
    serverProperties.put(name, value);
    return this;
  }

  /** Seeds Galaxy with the bundled prebuilt SQLite database. */
  public GalaxyProperties prepopulateSqliteDatabase() {
    return prepopulateSqliteDatabase(Resources.getResource(GalaxyProperties.class, "universe.sqlite"));
  }

  /**
   *
   * @return True if it should be inferred that Galaxy is targeting a brand
   *  new database and create_db.sh should be executed.
   */
  public boolean isCreateDatabaseRequired() {
    // Logic in here could be better, database_url may be set and pointing at
    // an existing database - so there should be an option to disable this
    // without specifying a prepopulated sqlite database.
    return !database.isPresent();
  }

  /** Seeds Galaxy with the SQLite database found at {@code database}. */
  public GalaxyProperties prepopulateSqliteDatabase(final URL database) {
    this.database = Optional.of(database);
    // Set database auto migrate to true so database
    // is upgraded from revision in jar if needed.
    setAppProperty("database_auto_migrate", "true");
    return this;
  }

  /** Picks a free local port and configures Galaxy to listen on it. */
  public GalaxyProperties assignFreePort() {
    port = IoUtils.findFreePort();
    serverProperties.put("port", Integer.toString(port));
    galaxyURL = adjustGalaxyURL(port);
    return this;
  }

  /** Enables the nested shed_tools layout written by configureGalaxy. */
  public GalaxyProperties configureNestedShedTools() {
    this.configureNestedShedTools = true;
    return this;
  }

  /** Convenience for {@link #setAdminUsers} with a single user. */
  public void setAdminUser(final String username) {
    setAdminUsers(Lists.newArrayList(username));
  }

  /** Registers the given usernames as Galaxy admin_users. */
  public void setAdminUsers(final Iterable<String> usernames) {
    final String usernamesStr = Joiner.on(",").join(usernames);
    logger.debug("Setting admin users: " + usernamesStr);
    setAppProperty("admin_users", usernamesStr);
  }

  /**
   * Writes the accumulated configuration into the Galaxy checkout rooted at
   * {@code galaxyRoot}: optionally installs the nested shed tool layout,
   * renders universe_wsgi.ini from the sample ini, and copies the seed
   * SQLite database when one was configured.
   *
   * @throws RuntimeException wrapping any {@link IOException}
   */
  public void configureGalaxy(final File galaxyRoot) {
    try {
      if(configureNestedShedTools) {
        final File shedConf = new File(galaxyRoot, "shed_tool_conf.xml");
        final InputSupplier<InputStream> shedToolConfSupplier = Resources.newInputStreamSupplier(getClass().getResource("shed_tool_conf.xml"));
        Files.copy(shedToolConfSupplier, shedConf);
        new File(galaxyRoot, "shed_tools").mkdirs();
      }
      // Newer Galaxy ships the sample ini under config/; fall back to the
      // legacy root-level universe_wsgi.ini.sample.
      final File configDirectory = new File(galaxyRoot, "config");
      File sampleIni = new File(configDirectory, "galaxy.ini.sample");
      if(!sampleIni.exists()) {
        sampleIni = new File(galaxyRoot, "universe_wsgi.ini.sample");
      }
      // The Ini constructor does not close the supplied reader, so close it
      // ourselves to avoid leaking a file handle (previously leaked).
      final FileReader sampleReader = new FileReader(sampleIni);
      final Ini ini;
      try {
        ini = new Ini(sampleReader);
      } finally {
        sampleReader.close();
      }
      final Section appSection = ini.get("app:main");
      final boolean toolsConfigured = appProperties.containsKey("tool_config_file");
      if(!toolsConfigured && configureNestedShedTools) {
        appProperties.put("tool_config_file", "tool_conf.xml,shed_tool_conf.xml");
      }
      // Hack to work around following bug: https://trello.com/c/nKxmP6Vc
      // Without this, galaxy will not startup because of problems
      // with tool migration framework.
      if(!appProperties.containsKey("running_functional_tests")) {
        appProperties.put("running_functional_tests", "true");
      }
      dumpMapToSection(appSection, appProperties);
      final Section serverSection = ini.get("server:main");
      dumpMapToSection(serverSection, serverProperties);
      // NOTE(review): the rendered config is always written to the legacy
      // root-level universe_wsgi.ini, even when the sample was read from
      // config/galaxy.ini.sample — confirm Galaxy still honors that path.
      final File configIni = new File(galaxyRoot, "universe_wsgi.ini");
      ini.store(configIni);
      final File databaseDirectory = new File(galaxyRoot, "database");
      final File sqliteDatabase = new File(databaseDirectory, "universe.sqlite");
      if(this.database.isPresent()) {
        final URL database = this.database.get();
        Files.copy(Resources.newInputStreamSupplier(database), sqliteDatabase);
      }
    } catch(final IOException ioException) {
      throw new RuntimeException(ioException);
    }
  }

  /** Copies all entries of {@code values} into the given ini section. */
  private void dumpMapToSection(final Section section, final Map<String, String> values) {
    section.putAll(values);
  }

  public int getPort() {
    return port;
  }

  public String getGalaxyURL() {
    return galaxyURL;
  }
}
src/main/java/com/github/jmchilton/galaxybootstrap/GalaxyProperties.java
package com.github.jmchilton.galaxybootstrap; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import com.google.common.io.InputSupplier; import com.google.common.io.Resources; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Map; import org.ini4j.Ini; import org.ini4j.Profile.Section; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author John Chilton */ @SuppressWarnings("deprecation") public class GalaxyProperties { private static final Logger logger = LoggerFactory .getLogger(GalaxyProperties.class); private final Map<String, String> appProperties = Maps.newHashMap(); private final Map<String, String> serverProperties = Maps.newHashMap(); private int port = 8080; // default private String galaxyURL = adjustGalaxyURL(port); private boolean configureNestedShedTools = false; private Optional<URL> database = Optional.absent(); private static String adjustGalaxyURL(int port) { return "http://localhost:" + port + "/"; } public GalaxyProperties setAppProperty(final String name, final String value) { appProperties.put(name, value); return this; } public GalaxyProperties setServerProperty(final String name, final String value) { serverProperties.put(name, value); return this; } public GalaxyProperties prepopulateSqliteDatabase() { return prepopulateSqliteDatabase(Resources.getResource(GalaxyProperties.class, "universe.sqlite")); } /** * * @return True if it should be inferred that Galaxy is targeting a brand * new database and create_db.sh should be executed. */ public boolean isCreateDatabaseRequired() { // Logic in here could be better, database_url may be set and pointing at // an existing database - so there should be an option to disable this // without specifing a prepopulated sqlite database. 
return !database.isPresent(); } public GalaxyProperties prepopulateSqliteDatabase(final URL database) { this.database = Optional.of(database); // Set database auto migrate to true so database // is upgraded from revision in jar if needed. setAppProperty("database_auto_migrate", "true"); return this; } public GalaxyProperties assignFreePort() { port = IoUtils.findFreePort(); serverProperties.put("port", Integer.toString(port)); galaxyURL = adjustGalaxyURL(port); return this; } public GalaxyProperties configureNestedShedTools() { this.configureNestedShedTools = true; return this; } public void setAdminUser(final String username) { setAdminUsers(Lists.newArrayList(username)); } public void setAdminUsers(final Iterable<String> usernames) { final String usernamesStr = Joiner.on(",").join(usernames); logger.debug("Setting admin users: " + usernamesStr); setAppProperty("admin_users", usernamesStr); } public void configureGalaxy(final File galaxyRoot) { try { if(configureNestedShedTools) { final File shedConf = new File(galaxyRoot, "shed_tool_conf.xml"); final InputSupplier<InputStream> shedToolConfSupplier = Resources.newInputStreamSupplier(getClass().getResource("shed_tool_conf.xml")); Files.copy(shedToolConfSupplier, shedConf); new File(galaxyRoot, "shed_tools").mkdirs(); } final File sampleIni = new File(galaxyRoot, "universe_wsgi.ini.sample"); final Ini ini = new Ini(new FileReader(sampleIni)); final Section appSection = ini.get("app:main"); final boolean toolsConfigured = appProperties.containsKey("tool_config_file"); if(!toolsConfigured && configureNestedShedTools) { appProperties.put("tool_config_file", "tool_conf.xml,shed_tool_conf.xml"); } // Hack to work around following bug: https://trello.com/c/nKxmP6Vc // Without this, galaxy will not startup because of problems // with tool migration framework. 
if(!appProperties.containsKey("running_functional_tests")) { appProperties.put("running_functional_tests", "true"); } dumpMapToSection(appSection, appProperties); final Section serverSection = ini.get("server:main"); dumpMapToSection(serverSection, serverProperties); final File configIni = new File(galaxyRoot, "universe_wsgi.ini"); ini.store(configIni); final File databaseDirectory = new File(galaxyRoot, "database"); final File sqliteDatabase = new File(databaseDirectory, "universe.sqlite"); if(this.database.isPresent()) { final URL database = this.database.get(); Files.copy(Resources.newInputStreamSupplier(database), sqliteDatabase); } } catch(final IOException ioException) { throw new RuntimeException(ioException); } } private void dumpMapToSection(final Section section, final Map<String, String> values) { section.putAll(values); } public int getPort() { return port; } public String getGalaxyURL() { return galaxyURL; } }
Fix config for switch from universe_wsgi.ini -> config/galaxy.ini.
src/main/java/com/github/jmchilton/galaxybootstrap/GalaxyProperties.java
Fix config for switch from universe_wsgi.ini -> config/galaxy.ini.
Java
epl-1.0
3d839a7d359b718d7e311c3ed3bc5d38b2247e50
0
marnix/metamath-eclipse-xtext
package mm.ecxt.ui; import org.eclipse.ui.plugin.AbstractUIPlugin; import org.eclipse.xtext.resource.containers.IAllContainersState; import org.eclipse.xtext.ui.editor.model.IResourceForEditorInputFactory; import org.eclipse.xtext.ui.editor.model.ResourceForIEditorInputFactory; import org.eclipse.xtext.ui.resource.IResourceSetProvider; import org.eclipse.xtext.ui.resource.SimpleResourceSetProvider; import com.google.inject.Provider; /** * Use this class to register components to be used within the IDE. */ public class MMLanguageUiModule extends mm.ecxt.ui.AbstractMMLanguageUiModule { public MMLanguageUiModule(AbstractUIPlugin plugin) { super(plugin); } @Override public Class<? extends IResourceForEditorInputFactory> bindIResourceForEditorInputFactory() { return ResourceForIEditorInputFactory.class; } @Override public Class<? extends IResourceSetProvider> bindIResourceSetProvider() { return SimpleResourceSetProvider.class; } @Override public Provider<IAllContainersState> provideIAllContainersState() { return org.eclipse.xtext.ui.shared.Access.getWorkspaceProjectsState(); } }
mm.ecxt.ui/src/mm/ecxt/ui/MMLanguageUiModule.java
/* * generated by Xtext */ package mm.ecxt.ui; import org.eclipse.ui.plugin.AbstractUIPlugin; /** * Use this class to register components to be used within the IDE. */ public class MMLanguageUiModule extends mm.ecxt.ui.AbstractMMLanguageUiModule { public MMLanguageUiModule(AbstractUIPlugin plugin) { super(plugin); } }
Removed some JDT dependencies. Based on https://eclipse.org/Xtext/documentation/307_special_languages.html#java-independent-languages this commit overrides a few JDT dependencies that were Guice-injected.
mm.ecxt.ui/src/mm/ecxt/ui/MMLanguageUiModule.java
Removed some JDT dependencies.
Java
epl-1.0
8fdb0b3d483a4934ee62a8e6d48328423150cfb0
0
inevo/mondrian,inevo/mondrian
/* // $Id$ // This software is subject to the terms of the Common Public License // Agreement, available at the following URL: // http://www.opensource.org/licenses/cpl.html. // Copyright (C) 2006-2006 Julian Hyde // All Rights Reserved. // You must accept the terms of that agreement to use this software. */ package mondrian.test; import junit.framework.Assert; import junit.framework.AssertionFailedError; import junit.framework.ComparisonFailure; import org.eigenbase.xom.XMLOutput; import org.w3c.dom.*; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.*; import java.util.HashMap; import java.util.Map; import mondrian.olap.Util; /** * A collection of resources used by tests. * * <p>Loads files containing test input and output into memory. * If there are differences, writes out a log file containing the actual * output. * * <p>Typical usage is as follows:<ol> * <li>A testcase class defines a method<blockquote><code><pre> * * package com.acme.test; * * public class MyTest extends TestCase { * public DiffRepository getDiffRepos() { * return DiffRepository.lookup(MyTest.class); * } * * public void testToUpper() { * getDiffRepos().assertEquals("${result}", "${string}"); * } * public void testToLower() { * getDiffRepos().assertEquals("Multi-line\nstring", "${string}"); * } * }</pre></code></blockquote> * * There is an accompanying reference file named after the class, * <code>com/acme/test/MyTest.ref.xml</code>: * <blockquote><code><pre> * &lt;Root&gt; * &lt;TestCase name="testToUpper"&gt; * &lt;Resource name="string"&gt; * &lt;![CDATA[String to be converted to upper case]]&gt; * &lt;/Resource&gt; * &lt;Resource name="result"&gt; * &lt;![CDATA[STRING TO BE CONVERTED TO UPPER CASE]]&gt; * &lt;/Resource&gt; * &lt;/TestCase&gt; * &lt;TestCase name="testToLower"&gt; * &lt;Resource name="result"&gt; * &lt;![CDATA[multi-line * 
string]]&gt; * &lt;/Resource&gt; * &lt;/TestCase&gt; * &lt;/Root&gt; * </pre></code></blockquote> * * <p>If any of the testcases fails, a log file is generated, called * <code>com/acme/test/MyTest.log.xml</code> containing the actual output. * The log file is otherwise identical to the reference log, so once the * log file has been verified, it can simply be copied over to become the new * reference log.</p> * * <p>If a resource or testcase does not exist, <code>DiffRepository</code> * creates them in the log file. Because DiffRepository is so forgiving, it is * very easy to create new tests and testcases.</p> * * <p>The {@link #lookup} method ensures that all test cases share the same * instance of the repository. This is important more than one one test case * fails. The shared instance ensures that the generated <code>.log.xml</code> file * contains the actual for <em>both</em> test cases. * * @author jhyde * @version $Id$ */ public class DiffRepository { private final DiffRepository baseRepos; private final DocumentBuilder docBuilder; private Document doc; private final Element root; private final File refFile; private final File logFile; /* Example XML document: <Root> <TestCase name="testFoo"> <Resource name="sql"> <![CDATA[select * from emps]]> </Resource> <Resource name="plan"> <![CDATA[MockTableImplRel.FENNEL_EXEC(table=[SALES, EMP])]]> </Resource> </TestCase> <TestCase name="testBar"> <Resource name="sql"> <![CDATA[select * from depts where deptno = 10]]> </Resource> <Resource name="output"> <![CDATA[10, 'Sales']]> </Resource> </TestCase> </Root> */ private static final String RootTag = "Root"; private static final String TestCaseTag = "TestCase"; private static final String TestCaseNameAttr = "name"; private static final String ResourceTag = "Resource"; private static final String ResourceNameAttr = "name"; /** * Holds one diff-repository per class. 
It is necessary for all testcases * in the same class to share the same diff-repository: if the * repos gets loaded once per testcase, then only one diff is recorded. */ private static final Map mapClassToRepos = new HashMap(); private static File findFile(Class clazz, final String suffix) { // The reference file for class "com.foo.Bar" is "com/foo/Bar.ref.xml" String rest = clazz.getName().replace('.', File.separatorChar) + suffix; File fileBase = getFileBase(clazz); return new File(fileBase, rest); } /** * Returns the base directory relative to which test logs are stored. * * <p>Deduces the directory based upon the current directory. * If the current directory is "/home/jhyde/open/mondrian/intellij", * returns "/home/jhyde/open/mondrian/testsrc". */ private static File getFileBase(Class clazz) { String javaFileName = clazz.getName().replace('.', File.separatorChar) + ".java"; File file = new File(System.getProperty("user.dir")); while (true) { File file2 = new File(new File(file, "testsrc"), "main"); if (file2.isDirectory() && new File(file2, javaFileName).exists()) { return file2; } file = file.getParentFile(); if (file == null) { throw new RuntimeException("cannot find base dir"); } } } public DiffRepository(File refFile, File logFile, DiffRepository baseRepos) { this.baseRepos = baseRepos; if (refFile == null) { throw new IllegalArgumentException("url must not be null"); } this.refFile = refFile; Util.discard(this.refFile); this.logFile = logFile; // Load the document. DocumentBuilderFactory fac = DocumentBuilderFactory.newInstance(); try { this.docBuilder = fac.newDocumentBuilder(); if (refFile.exists()) { // Parse the reference file. this.doc = docBuilder.parse(new FileInputStream(refFile)); // Don't write a log file yet -- as far as we know, it's still // identical. } else { // There's no reference file. Create and write a log file. 
this.doc = docBuilder.newDocument(); this.doc.appendChild( doc.createElement(RootTag)); flushDoc(); } this.root = doc.getDocumentElement(); if (!root.getNodeName().equals(RootTag)) { throw new RuntimeException( "expected root element of type '" + RootTag + "', but found '" + root.getNodeName() + "'"); } } catch (ParserConfigurationException e) { throw Util.newInternal(e, "error while creating xml parser"); } catch (IOException e) { throw Util.newInternal(e, "error while creating xml parser"); } catch (SAXException e) { throw Util.newInternal(e, "error while creating xml parser"); } } /** * Expands a string containing one or more variables. * (Currently only works if there is one variable.) */ public String expand(String tag, String text) { if (text == null) { return null; } else if (text.startsWith("${") && text.endsWith("}")) { final String testCaseName = getCurrentTestCaseName(true); final String token = text.substring(2, text.length() - 1); if (tag == null) { tag = token; } assert token.startsWith(tag) : "token '" + token + "' does not match tag '" + tag + "'"; final String expanded = get(testCaseName, token); if (expanded == null) { // Token is not specified. Return the original text: this will // cause a diff, and the actual value will be written to the // log file. return text; } return expanded; } else { // Make sure what appears in the resource file is consistent with // what is in the Java. It helps to have a redundant copy in the // resource file. final String testCaseName = getCurrentTestCaseName(true); if (baseRepos != null && baseRepos.get(testCaseName, tag) != null) { // set in base repos; don't override } else { set(tag, text); } return text; } } /** * Sets the value of a given resource of the current testcase. * * @param resourceName Name of the resource, e.g. 
"sql" * @param value Value of the resource */ public synchronized void set(String resourceName, String value) { assert resourceName != null; final String testCaseName = getCurrentTestCaseName(true); update(testCaseName, resourceName, value); } public void amend(String expected, String actual) { if (expected.startsWith("${") && expected.endsWith("}")) { String token = expected.substring(2, expected.length() - 1); set(token, actual); } else { // do nothing } } /** * Returns a given resource from a given testcase. * * @param testCaseName Name of test case, e.g. "testFoo" * @param resourceName Name of resource, e.g. "sql", "plan" * @return The value of the resource, or null if not found */ private String get( final String testCaseName, String resourceName) { Element testCaseElement = getTestCaseElement(root, testCaseName); if (testCaseElement == null) { if (baseRepos != null) { return baseRepos.get(testCaseName, resourceName); } else { return null; } } final Element resourceElement = getResourceElement(testCaseElement, resourceName); if (resourceElement != null) { return getText(resourceElement); } return null; } /** * Returns the text under an element. */ private static String getText(Element element) { // If there is a <![CDATA[ ... ]]> child, return its text and ignore // all other child elements. final NodeList childNodes = element.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node node = childNodes.item(i); if (node instanceof CDATASection) { return node.getNodeValue(); } } // Otherwise return all the text under this element (including // whitespace). StringBuffer buf = new StringBuffer(); for (int i = 0; i < childNodes.getLength(); i++) { Node node = childNodes.item(i); if (node instanceof Text) { buf.append(((Text) node).getWholeText()); } } return buf.toString(); } /** * Returns the &lt;TestCase&gt; element corresponding to the current * test case. 
* * @param root Root element of the document * @param testCaseName Name of test case * @return TestCase element, or null if not found */ private static Element getTestCaseElement( final Element root, final String testCaseName) { final NodeList childNodes = root.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); if (child.getNodeName().equals(TestCaseTag)) { Element testCase = (Element) child; if (testCaseName.equals( testCase.getAttribute(TestCaseNameAttr))) { return testCase; } } } return null; } /** * Returns the name of the current testcase by looking up the call * stack for a method whose name starts with "test", for example * "testFoo". * * @param fail Whether to fail if no method is found * @return Name of current testcase, or null if not found */ private String getCurrentTestCaseName(boolean fail) { // Clever, this. Dump the stack and look up it for a method which // looks like a testcase name, e.g. "testFoo". final StackTraceElement[] stackTrace; Throwable runtimeException = new Throwable(); runtimeException.fillInStackTrace(); stackTrace = runtimeException.getStackTrace(); for (int i = 0; i < stackTrace.length; i++) { StackTraceElement stackTraceElement = stackTrace[i]; final String methodName = stackTraceElement.getMethodName(); if (methodName.startsWith("test")) { return methodName; } } if (fail) { throw new RuntimeException("no testcase on current callstack"); } else { return null; } } public void assertEquals(String tag, String expected, String actual) { final String testCaseName = getCurrentTestCaseName(true); String expected2 = expand(tag, expected); if (expected2 == null) { update(testCaseName, expected, actual); throw new AssertionFailedError( "reference file does not contain resource '" + expected + "' for testcase '" + testCaseName + "'"); } else { try { // TODO jvs 25-Apr-2006: reuse bulk of // DiffTestCase.diffTestLog here; besides newline // insensitivity, it can report on the line // at which 
the first diff occurs, which is useful // for largish snippets String expected2Canonical = expected2.replace(Util.nl, "\n"); String actualCanonical = actual.replace(Util.nl, "\n"); Assert.assertEquals( expected2Canonical, actualCanonical); } catch (ComparisonFailure e) { amend(expected, actual); throw e; } } } /** * Creates a new document with a given resource. * * <p>This method is synchronized, in case two threads are running * test cases of this test at the same time. * * @param testCaseName * @param resourceName * @param value */ private synchronized void update( String testCaseName, String resourceName, String value) { Element testCaseElement = getTestCaseElement(root, testCaseName); if (testCaseElement == null) { testCaseElement = doc.createElement(TestCaseTag); testCaseElement.setAttribute(TestCaseNameAttr, testCaseName); root.appendChild(testCaseElement); } Element resourceElement = getResourceElement(testCaseElement, resourceName); if (resourceElement == null) { resourceElement = doc.createElement(ResourceTag); resourceElement.setAttribute(ResourceNameAttr, resourceName); testCaseElement.appendChild(resourceElement); } else { removeAllChildren(resourceElement); } resourceElement.appendChild(doc.createCDATASection(value)); // Write out the document. flushDoc(); } /** * Flush the reference document to the file system. */ private void flushDoc() { FileWriter w = null; try { w = new FileWriter(logFile); write(doc, w); } catch (IOException e) { throw Util.newInternal( e, "error while writing test reference log '" + logFile + "'"); } finally { if (w != null) { try { w.close(); } catch (IOException e) { // ignore } } } } /** * Returns a given resource from a given testcase. * * @param testCaseElement The enclosing TestCase element, * e.g. <code>&lt;TestCase name="testFoo"&gt;</code>. * @param resourceName Name of resource, e.g. 
"sql", "plan" * @return The value of the resource, or null if not found */ private static Element getResourceElement( Element testCaseElement, String resourceName) { final NodeList childNodes = testCaseElement.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); if (child.getNodeName().equals(ResourceTag) && resourceName.equals( ((Element) child).getAttribute(ResourceNameAttr))) { return (Element) child; } } return null; } private static void removeAllChildren(Element element) { final NodeList childNodes = element.getChildNodes(); while (childNodes.getLength() > 0) { element.removeChild(childNodes.item(0)); } } /** * Serializes an XML document as text. * * <p>FIXME: I'm sure there's a library call to do this, but I'm danged * if I can find it. -- jhyde, 2006/2/9. */ private static void write(Document doc, Writer w) { final XMLOutput out = new XMLOutput(w); out.setIndentString(" "); writeNode(doc, out); } private static void writeNode(Node node, XMLOutput out) { final NodeList childNodes; switch (node.getNodeType()) { case Node.DOCUMENT_NODE: out.print("<?xml version=\"1.0\" ?>" + Util.nl); childNodes = node.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); writeNode(child, out); } // writeNode(((Document) node).getDocumentElement(), out); break; case Node.ELEMENT_NODE: Element element = (Element) node; final String tagName = element.getTagName(); out.beginBeginTag(tagName); // Attributes. final NamedNodeMap attributeMap = element.getAttributes(); for (int i = 0; i < attributeMap.getLength(); i++) { final Node att = attributeMap.item(i); out.attribute(att.getNodeName(), att.getNodeValue()); } out.endBeginTag(tagName); // Write child nodes, ignoring attributes but including text. 
childNodes = node.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); if (child.getNodeType() == Node.ATTRIBUTE_NODE) { continue; } writeNode(child, out); } out.endTag(tagName); break; case Node.ATTRIBUTE_NODE: out.attribute(node.getNodeName(), node.getNodeValue()); break; case Node.CDATA_SECTION_NODE: CDATASection cdata = (CDATASection) node; out.cdata(cdata.getNodeValue(), true); break; case Node.TEXT_NODE: Text text = (Text) node; final String wholeText = text.getNodeValue(); if (!isWhitespace(wholeText)) { out.cdata(wholeText, false); } break; case Node.COMMENT_NODE: Comment comment = (Comment) node; out.print("<!--" + comment.getNodeValue() + "-->" + Util.nl); break; default: throw new RuntimeException( "unexpected node type: " + node.getNodeType() + " (" + node + ")"); } } private static boolean isWhitespace(String text) { for (int i = 0, count = text.length(); i < count; ++i) { final char c = text.charAt(i); switch (c) { case ' ': case '\t': case '\n': break; default: return false; } } return true; } public static DiffRepository lookup(Class clazz) { return lookup(clazz, null); } /** * Finds the repository instance for a given class. * * <p>It is important that all testcases in a class share the same * repository instance. This ensures that, if two or more testcases fail, * the log file will contains the actual results of both testcases. * * <p>The <code>baseRepos</code> parameter is useful if the test is an * extension to a previous test. If the test class has a base class which * also has a repository, specify the repository here. DiffRepository will * look for resources in the base class if it cannot find them in this * repository. If test resources from testcases in the base class are * missing or incorrect, it will not write them to the log file -- you * probably need to fix the base test. 
* * @param clazz Testcase class * @param baseRepos Base class of test class * @return The diff repository shared between testcases in this class. */ public static DiffRepository lookup(Class clazz, DiffRepository baseRepos) { DiffRepository diffRepos = (DiffRepository) mapClassToRepos.get(clazz); if (diffRepos == null) { final File refFile = findFile(clazz, ".ref.xml"); final File logFile = findFile(clazz, ".log.xml"); diffRepos = new DiffRepository(refFile, logFile, baseRepos); mapClassToRepos.put(clazz, diffRepos); } return diffRepos; } } // End DiffRepository.java
testsrc/main/mondrian/test/DiffRepository.java
/* // $Id$ // This software is subject to the terms of the Common Public License // Agreement, available at the following URL: // http://www.opensource.org/licenses/cpl.html. // Copyright (C) 2006-2006 Julian Hyde // All Rights Reserved. // You must accept the terms of that agreement to use this software. */ package mondrian.test; import junit.framework.Assert; import junit.framework.AssertionFailedError; import junit.framework.ComparisonFailure; import org.eigenbase.xom.XMLOutput; import org.w3c.dom.*; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.*; import java.util.HashMap; import java.util.Map; import mondrian.olap.Util; /** * A collection of resources used by tests. * * <p>Loads files containing test input and output into memory. * If there are differences, writes out a log file containing the actual * output. * * <p>Typical usage is as follows:<ol> * <li>A testcase class defines a method<blockquote><code><pre> * * package com.acme.test; * * public class MyTest extends TestCase { * public DiffRepository getDiffRepos() { * return DiffRepository.lookup(MyTest.class); * } * * public void testToUpper() { * getDiffRepos().assertEquals("${result}", "${string}"); * } * public void testToLower() { * getDiffRepos().assertEquals("Multi-line\nstring", "${string}"); * } * }</pre></code></blockquote> * * There is an accompanying reference file named after the class, * <code>com/acme/test/MyTest.ref.xml</code>: * <blockquote><code><pre> * &lt;Root&gt; * &lt;TestCase name="testToUpper"&gt; * &lt;Resource name="string"&gt; * &lt;![CDATA[String to be converted to upper case]]&gt; * &lt;/Resource&gt; * &lt;Resource name="result"&gt; * &lt;![CDATA[STRING TO BE CONVERTED TO UPPER CASE]]&gt; * &lt;/Resource&gt; * &lt;/TestCase&gt; * &lt;TestCase name="testToLower"&gt; * &lt;Resource name="result"&gt; * &lt;![CDATA[multi-line * 
string]]&gt; * &lt;/Resource&gt; * &lt;/TestCase&gt; * &lt;/Root&gt; * </pre></code></blockquote> * * <p>If any of the testcases fails, a log file is generated, called * <code>com/acme/test/MyTest.log.xml</code> containing the actual output. * The log file is otherwise identical to the reference log, so once the * log file has been verified, it can simply be copied over to become the new * reference log.</p> * * <p>If a resource or testcase does not exist, <code>DiffRepository</code> * creates them in the log file. Because DiffRepository is so forgiving, it is * very easy to create new tests and testcases.</p> * * <p>The {@link #lookup} method ensures that all test cases share the same * instance of the repository. This is important more than one one test case * fails. The shared instance ensures that the generated <code>.log.xml</code> file * contains the actual for <em>both</em> test cases. * * @author jhyde * @version $Id$ */ public class DiffRepository { private final DiffRepository baseRepos; private final DocumentBuilder docBuilder; private Document doc; private final Element root; private final File refFile; private final File logFile; /* Example XML document: <Root> <TestCase name="testFoo"> <Resource name="sql"> <![CDATA[select * from emps]]> </Resource> <Resource name="plan"> <![CDATA[MockTableImplRel.FENNEL_EXEC(table=[SALES, EMP])]]> </Resource> </TestCase> <TestCase name="testBar"> <Resource name="sql"> <![CDATA[select * from depts where deptno = 10]]> </Resource> <Resource name="output"> <![CDATA[10, 'Sales']]> </Resource> </TestCase> </Root> */ private static final String RootTag = "Root"; private static final String TestCaseTag = "TestCase"; private static final String TestCaseNameAttr = "name"; private static final String ResourceTag = "Resource"; private static final String ResourceNameAttr = "name"; /** * Holds one diff-repository per class. 
It is necessary for all testcases * in the same class to share the same diff-repository: if the * repos gets loaded once per testcase, then only one diff is recorded. */ private static final Map<Class, DiffRepository> mapClassToRepos = new HashMap<Class, DiffRepository>(); private static File findFile(Class clazz, final String suffix) { // The reference file for class "com.foo.Bar" is "com/foo/Bar.ref.xml" String rest = clazz.getName().replace('.', File.separatorChar) + suffix; File fileBase = getFileBase(clazz); return new File(fileBase, rest); } /** * Returns the base directory relative to which test logs are stored. * * <p>Deduces the directory based upon the current directory. * If the current directory is "/home/jhyde/open/mondrian/intellij", * returns "/home/jhyde/open/mondrian/testsrc". */ private static File getFileBase(Class clazz) { String javaFileName = clazz.getName().replace('.', File.separatorChar) + ".java"; File file = new File(System.getProperty("user.dir")); while (true) { File file2 = new File(new File(file, "testsrc"), "main"); if (file2.isDirectory() && new File(file2, javaFileName).exists()) { return file2; } file = file.getParentFile(); if (file == null) { throw new RuntimeException("cannot find base dir"); } } } public DiffRepository(File refFile, File logFile, DiffRepository baseRepos) { this.baseRepos = baseRepos; if (refFile == null) { throw new IllegalArgumentException("url must not be null"); } this.refFile = refFile; Util.discard(this.refFile); this.logFile = logFile; // Load the document. DocumentBuilderFactory fac = DocumentBuilderFactory.newInstance(); try { this.docBuilder = fac.newDocumentBuilder(); if (refFile.exists()) { // Parse the reference file. this.doc = docBuilder.parse(new FileInputStream(refFile)); // Don't write a log file yet -- as far as we know, it's still // identical. } else { // There's no reference file. Create and write a log file. 
this.doc = docBuilder.newDocument(); this.doc.appendChild( doc.createElement(RootTag)); flushDoc(); } this.root = doc.getDocumentElement(); if (!root.getNodeName().equals(RootTag)) { throw new RuntimeException( "expected root element of type '" + RootTag + "', but found '" + root.getNodeName() + "'"); } } catch (ParserConfigurationException e) { throw Util.newInternal(e, "error while creating xml parser"); } catch (IOException e) { throw Util.newInternal(e, "error while creating xml parser"); } catch (SAXException e) { throw Util.newInternal(e, "error while creating xml parser"); } } /** * Expands a string containing one or more variables. * (Currently only works if there is one variable.) */ public String expand(String tag, String text) { if (text == null) { return null; } else if (text.startsWith("${") && text.endsWith("}")) { final String testCaseName = getCurrentTestCaseName(true); final String token = text.substring(2, text.length() - 1); if (tag == null) { tag = token; } assert token.startsWith(tag) : "token '" + token + "' does not match tag '" + tag + "'"; final String expanded = get(testCaseName, token); if (expanded == null) { // Token is not specified. Return the original text: this will // cause a diff, and the actual value will be written to the // log file. return text; } return expanded; } else { // Make sure what appears in the resource file is consistent with // what is in the Java. It helps to have a redundant copy in the // resource file. final String testCaseName = getCurrentTestCaseName(true); if (baseRepos != null && baseRepos.get(testCaseName, tag) != null) { // set in base repos; don't override } else { set(tag, text); } return text; } } /** * Sets the value of a given resource of the current testcase. * * @param resourceName Name of the resource, e.g. 
"sql" * @param value Value of the resource */ public synchronized void set(String resourceName, String value) { assert resourceName != null; final String testCaseName = getCurrentTestCaseName(true); update(testCaseName, resourceName, value); } public void amend(String expected, String actual) { if (expected.startsWith("${") && expected.endsWith("}")) { String token = expected.substring(2, expected.length() - 1); set(token, actual); } else { // do nothing } } /** * Returns a given resource from a given testcase. * * @param testCaseName Name of test case, e.g. "testFoo" * @param resourceName Name of resource, e.g. "sql", "plan" * @return The value of the resource, or null if not found */ private String get( final String testCaseName, String resourceName) { Element testCaseElement = getTestCaseElement(root, testCaseName); if (testCaseElement == null) { if (baseRepos != null) { return baseRepos.get(testCaseName, resourceName); } else { return null; } } final Element resourceElement = getResourceElement(testCaseElement, resourceName); if (resourceElement != null) { return getText(resourceElement); } return null; } /** * Returns the text under an element. */ private static String getText(Element element) { // If there is a <![CDATA[ ... ]]> child, return its text and ignore // all other child elements. final NodeList childNodes = element.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node node = childNodes.item(i); if (node instanceof CDATASection) { return node.getNodeValue(); } } // Otherwise return all the text under this element (including // whitespace). StringBuffer buf = new StringBuffer(); for (int i = 0; i < childNodes.getLength(); i++) { Node node = childNodes.item(i); if (node instanceof Text) { buf.append(((Text) node).getWholeText()); } } return buf.toString(); } /** * Returns the &lt;TestCase&gt; element corresponding to the current * test case. 
* * @param root Root element of the document * @param testCaseName Name of test case * @return TestCase element, or null if not found */ private static Element getTestCaseElement( final Element root, final String testCaseName) { final NodeList childNodes = root.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); if (child.getNodeName().equals(TestCaseTag)) { Element testCase = (Element) child; if (testCaseName.equals( testCase.getAttribute(TestCaseNameAttr))) { return testCase; } } } return null; } /** * Returns the name of the current testcase by looking up the call * stack for a method whose name starts with "test", for example * "testFoo". * * @param fail Whether to fail if no method is found * @return Name of current testcase, or null if not found */ private String getCurrentTestCaseName(boolean fail) { // Clever, this. Dump the stack and look up it for a method which // looks like a testcase name, e.g. "testFoo". final StackTraceElement[] stackTrace; Throwable runtimeException = new Throwable(); runtimeException.fillInStackTrace(); stackTrace = runtimeException.getStackTrace(); for (int i = 0; i < stackTrace.length; i++) { StackTraceElement stackTraceElement = stackTrace[i]; final String methodName = stackTraceElement.getMethodName(); if (methodName.startsWith("test")) { return methodName; } } if (fail) { throw new RuntimeException("no testcase on current callstack"); } else { return null; } } public void assertEquals(String tag, String expected, String actual) { final String testCaseName = getCurrentTestCaseName(true); String expected2 = expand(tag, expected); if (expected2 == null) { update(testCaseName, expected, actual); throw new AssertionFailedError( "reference file does not contain resource '" + expected + "' for testcase '" + testCaseName + "'"); } else { try { // TODO jvs 25-Apr-2006: reuse bulk of // DiffTestCase.diffTestLog here; besides newline // insensitivity, it can report on the line // at which 
the first diff occurs, which is useful // for largish snippets String expected2Canonical = expected2.replace(Util.nl, "\n"); String actualCanonical = actual.replace(Util.nl, "\n"); Assert.assertEquals( expected2Canonical, actualCanonical); } catch (ComparisonFailure e) { amend(expected, actual); throw e; } } } /** * Creates a new document with a given resource. * * <p>This method is synchronized, in case two threads are running * test cases of this test at the same time. * * @param testCaseName * @param resourceName * @param value */ private synchronized void update( String testCaseName, String resourceName, String value) { Element testCaseElement = getTestCaseElement(root, testCaseName); if (testCaseElement == null) { testCaseElement = doc.createElement(TestCaseTag); testCaseElement.setAttribute(TestCaseNameAttr, testCaseName); root.appendChild(testCaseElement); } Element resourceElement = getResourceElement(testCaseElement, resourceName); if (resourceElement == null) { resourceElement = doc.createElement(ResourceTag); resourceElement.setAttribute(ResourceNameAttr, resourceName); testCaseElement.appendChild(resourceElement); } else { removeAllChildren(resourceElement); } resourceElement.appendChild(doc.createCDATASection(value)); // Write out the document. flushDoc(); } /** * Flush the reference document to the file system. */ private void flushDoc() { FileWriter w = null; try { w = new FileWriter(logFile); write(doc, w); } catch (IOException e) { throw Util.newInternal( e, "error while writing test reference log '" + logFile + "'"); } finally { if (w != null) { try { w.close(); } catch (IOException e) { // ignore } } } } /** * Returns a given resource from a given testcase. * * @param testCaseElement The enclosing TestCase element, * e.g. <code>&lt;TestCase name="testFoo"&gt;</code>. * @param resourceName Name of resource, e.g. 
"sql", "plan" * @return The value of the resource, or null if not found */ private static Element getResourceElement( Element testCaseElement, String resourceName) { final NodeList childNodes = testCaseElement.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); if (child.getNodeName().equals(ResourceTag) && resourceName.equals( ((Element) child).getAttribute(ResourceNameAttr))) { return (Element) child; } } return null; } private static void removeAllChildren(Element element) { final NodeList childNodes = element.getChildNodes(); while (childNodes.getLength() > 0) { element.removeChild(childNodes.item(0)); } } /** * Serializes an XML document as text. * * <p>FIXME: I'm sure there's a library call to do this, but I'm danged * if I can find it. -- jhyde, 2006/2/9. */ private static void write(Document doc, Writer w) { final XMLOutput out = new XMLOutput(w); out.setIndentString(" "); writeNode(doc, out); } private static void writeNode(Node node, XMLOutput out) { final NodeList childNodes; switch (node.getNodeType()) { case Node.DOCUMENT_NODE: out.print("<?xml version=\"1.0\" ?>" + Util.nl); childNodes = node.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); writeNode(child, out); } // writeNode(((Document) node).getDocumentElement(), out); break; case Node.ELEMENT_NODE: Element element = (Element) node; final String tagName = element.getTagName(); out.beginBeginTag(tagName); // Attributes. final NamedNodeMap attributeMap = element.getAttributes(); for (int i = 0; i < attributeMap.getLength(); i++) { final Node att = attributeMap.item(i); out.attribute(att.getNodeName(), att.getNodeValue()); } out.endBeginTag(tagName); // Write child nodes, ignoring attributes but including text. 
childNodes = node.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); if (child.getNodeType() == Node.ATTRIBUTE_NODE) { continue; } writeNode(child, out); } out.endTag(tagName); break; case Node.ATTRIBUTE_NODE: out.attribute(node.getNodeName(), node.getNodeValue()); break; case Node.CDATA_SECTION_NODE: CDATASection cdata = (CDATASection) node; out.cdata(cdata.getNodeValue(), true); break; case Node.TEXT_NODE: Text text = (Text) node; final String wholeText = text.getNodeValue(); if (!isWhitespace(wholeText)) { out.cdata(wholeText, false); } break; case Node.COMMENT_NODE: Comment comment = (Comment) node; out.print("<!--" + comment.getNodeValue() + "-->" + Util.nl); break; default: throw new RuntimeException( "unexpected node type: " + node.getNodeType() + " (" + node + ")"); } } private static boolean isWhitespace(String text) { for (int i = 0, count = text.length(); i < count; ++i) { final char c = text.charAt(i); switch (c) { case ' ': case '\t': case '\n': break; default: return false; } } return true; } public static DiffRepository lookup(Class clazz) { return lookup(clazz, null); } /** * Finds the repository instance for a given class. * * <p>It is important that all testcases in a class share the same * repository instance. This ensures that, if two or more testcases fail, * the log file will contains the actual results of both testcases. * * <p>The <code>baseRepos</code> parameter is useful if the test is an * extension to a previous test. If the test class has a base class which * also has a repository, specify the repository here. DiffRepository will * look for resources in the base class if it cannot find them in this * repository. If test resources from testcases in the base class are * missing or incorrect, it will not write them to the log file -- you * probably need to fix the base test. 
* * @param clazz Testcase class * @param baseRepos Base class of test class * @return The diff repository shared between testcases in this class. */ public static DiffRepository lookup(Class clazz, DiffRepository baseRepos) { DiffRepository diffRepos = mapClassToRepos.get(clazz); if (diffRepos == null) { final File refFile = findFile(clazz, ".ref.xml"); final File logFile = findFile(clazz, ".log.xml"); diffRepos = new DiffRepository(refFile, logFile, baseRepos); mapClassToRepos.put(clazz, diffRepos); } return diffRepos; } } // End DiffRepository.java
MONDRIAN: Oops, no generics yet. [git-p4: depot-paths = "//open/mondrian/": change = 6708]
testsrc/main/mondrian/test/DiffRepository.java
MONDRIAN: Oops, no generics yet.
Java
lgpl-2.1
ac13d1b46c08e1b6bd3540cb3bfe35ebae34b64c
0
exedio/copernica,exedio/copernica,exedio/copernica
package com.exedio.cope.lib; import java.math.BigDecimal; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; class IntegerColumn extends Column { static final Integer JDBC_TYPE_INT = new Integer(Types.INTEGER); static final Integer JDBC_TYPE_LONG = new Integer(Types.BIGINT); final int precision; final boolean longInsteadOfInt; final int[] allowedValues; IntegerColumn(final Table table, final String id, final boolean notNull, final int precision, final boolean longInsteadOfInt, final int[] allowedValues) { super(table, id, false, notNull, longInsteadOfInt ? JDBC_TYPE_LONG : JDBC_TYPE_INT); this.precision = precision; this.longInsteadOfInt = longInsteadOfInt; this.allowedValues = allowedValues; } /** * Creates a primary key column. */ IntegerColumn(final Table table) { // IMPLEMENTATION NOTE // // The following line specifies the column name for the generated // primary key of all cope tables to be "this". This prevents // name collisions with columns for cope attributes, since "this" // is a reserved java keyword, which cannot be used for java attributes. // // It's a string literal, since the string is not used anywhere else // in the framework. In theory, one could specify different column names // for the primary key of different tables here, and the framework // should work as well. But I never tried this. 
super(table, "this", true, true, JDBC_TYPE_INT); this.precision = ItemColumn.SYNTETIC_PRIMARY_KEY_PRECISION; this.longInsteadOfInt = false; this.allowedValues = null; } final String getDatabaseType() { return table.database.getIntegerType(precision); } final String getAllowedValuesConstraintID() { if(allowedValues==null) throw new RuntimeException(id); return table.database.trimName(table.id + "_" + id+ "_Val"); } final void load(final ResultSet resultSet, final int columnIndex, final Row row) throws SQLException { final Object loadedInteger = resultSet.getObject(columnIndex); //System.out.println("IntegerColumn.load "+trimmedName+" "+loadedInteger); if(loadedInteger!=null) { row.load(this, convertSQLResult(loadedInteger)); } } final Object cacheToDatabase(final Object cache) { if(cache==null) return "NULL"; else { if(longInsteadOfInt) return ((Long)cache).toString(); else return ((Integer)cache).toString(); } } final long convertSQLResult(final Object sqlInteger) { // IMPLEMENTATION NOTE for Oracle // Whether the returned object is an Integer or a BigDecimal, // depends on whether OracleStatement.defineColumnType is used or not, // so we support both here. if(sqlInteger instanceof BigDecimal) return ((BigDecimal)sqlInteger).intValue(); else { if (longInsteadOfInt) return ((Long)sqlInteger).longValue(); else return ((Integer)sqlInteger).longValue(); } } }
lib/src/com/exedio/cope/lib/IntegerColumn.java
package com.exedio.cope.lib; import java.math.BigDecimal; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; class IntegerColumn extends Column { static final Integer JDBC_TYPE_INT = new Integer(Types.INTEGER); static final Integer JDBC_TYPE_LONG = new Integer(Types.BIGINT); final int precision; final boolean longInsteadOfInt; final int[] allowedValues; IntegerColumn(final Table table, final String id, final boolean notNull, final int precision, final boolean longInsteadOfInt, final int[] allowedValues) { super(table, id, false, notNull, longInsteadOfInt ? JDBC_TYPE_LONG : JDBC_TYPE_INT); this.precision = precision; this.longInsteadOfInt = longInsteadOfInt; this.allowedValues = allowedValues; } /** * Creates a primary key column. */ IntegerColumn(final Table table) { super(table, "PK", true, true, JDBC_TYPE_INT); this.precision = ItemColumn.SYNTETIC_PRIMARY_KEY_PRECISION; this.longInsteadOfInt = false; this.allowedValues = null; } final String getDatabaseType() { return table.database.getIntegerType(precision); } final String getAllowedValuesConstraintID() { if(allowedValues==null) throw new RuntimeException(id); return table.database.trimName(table.id + "_" + id+ "_Val"); } final void load(final ResultSet resultSet, final int columnIndex, final Row row) throws SQLException { final Object loadedInteger = resultSet.getObject(columnIndex); //System.out.println("IntegerColumn.load "+trimmedName+" "+loadedInteger); if(loadedInteger!=null) { row.load(this, convertSQLResult(loadedInteger)); } } final Object cacheToDatabase(final Object cache) { if(cache==null) return "NULL"; else { if(longInsteadOfInt) return ((Long)cache).toString(); else return ((Integer)cache).toString(); } } final long convertSQLResult(final Object sqlInteger) { // IMPLEMENTATION NOTE for Oracle // Whether the returned object is an Integer or a BigDecimal, // depends on whether OracleStatement.defineColumnType is used or not, // so we support both here. 
if(sqlInteger instanceof BigDecimal) return ((BigDecimal)sqlInteger).intValue(); else { if (longInsteadOfInt) return ((Long)sqlInteger).longValue(); else return ((Integer)sqlInteger).longValue(); } } }
use this for primary key column git-svn-id: 9dbc6da3594b32e13bcf3b3752e372ea5bc7c2cc@1596 e7d4fc99-c606-0410-b9bf-843393a9eab7
lib/src/com/exedio/cope/lib/IntegerColumn.java
use this for primary key column
Java
lgpl-2.1
3aed6c3fda915444f02d737c28644775fc7cdac1
0
MenZil/opencms-core,victos/opencms-core,ggiudetti/opencms-core,victos/opencms-core,victos/opencms-core,ggiudetti/opencms-core,MenZil/opencms-core,it-tavis/opencms-core,it-tavis/opencms-core,sbonoc/opencms-core,ggiudetti/opencms-core,alkacon/opencms-core,serrapos/opencms-core,alkacon/opencms-core,serrapos/opencms-core,MenZil/opencms-core,gallardo/opencms-core,sbonoc/opencms-core,mediaworx/opencms-core,mediaworx/opencms-core,ggiudetti/opencms-core,gallardo/opencms-core,sbonoc/opencms-core,gallardo/opencms-core,alkacon/opencms-core,serrapos/opencms-core,serrapos/opencms-core,sbonoc/opencms-core,it-tavis/opencms-core,gallardo/opencms-core,alkacon/opencms-core,serrapos/opencms-core,victos/opencms-core,serrapos/opencms-core,mediaworx/opencms-core,it-tavis/opencms-core,MenZil/opencms-core,mediaworx/opencms-core,serrapos/opencms-core
/* * File : $Source: /alkacon/cvs/opencms/src/com/opencms/file/Attic/CmsResourceTypePage.java,v $ * Date : $Date: 2001/10/02 13:00:20 $ * Version: $Revision: 1.18 $ * * This library is part of OpenCms - * the Open Source Content Mananagement System * * Copyright (C) 2001 The OpenCms Group * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * For further information about OpenCms, please see the * OpenCms Website: http://www.opencms.org * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.opencms.file; import java.util.zip.*; import com.opencms.core.*; import com.opencms.template.*; import com.opencms.util.*; import java.util.*; import java.io.*; import org.w3c.dom.*; import com.opencms.file.genericSql.*; //import com.opencms.file.genericSql.linkmanagement.*; /** * Access class for resources of the type "Page". 
* * @author Alexander Lucas * @version $Revision: 1.18 $ $Date: 2001/10/02 13:00:20 $ */ public class CmsResourceTypePage implements I_CmsResourceType, Serializable, I_CmsConstants, com.opencms.workplace.I_CmsWpConstants { /** Definition of the class */ private final static String C_CLASSNAME="com.opencms.template.CmsXmlTemplate"; private static final String C_DEFAULTBODY_START = "<?xml version=\"1.0\"?>\n<XMLTEMPLATE>\n<TEMPLATE>\n<![CDATA[\n"; private static final String C_DEFAULTBODY_END = "]]></TEMPLATE>\n</XMLTEMPLATE>"; /** * The id of resource type. */ private int m_resourceType; /** * The id of the launcher used by this resource. */ private int m_launcherType; /** * The resource type name. */ private String m_resourceTypeName; /** * The class name of the Java class launched by the launcher. */ private String m_launcherClass; /** * inits a new CmsResourceType object. * * @param resourceType The id of the resource type. * @param launcherType The id of the required launcher. * @param resourceTypeName The printable name of the resource type. * @param launcherClass The Java class that should be invoked by the launcher. * This value is <b> null </b> if the default invokation class should be used. */ public void init(int resourceType, int launcherType, String resourceTypeName, String launcherClass){ m_resourceType=resourceType; m_launcherType=launcherType; m_resourceTypeName=resourceTypeName; m_launcherClass=launcherClass; } /** * Returns the name of the Java class loaded by the launcher. * This method returns <b>null</b> if the default class for this type is used. * * @return the name of the Java class. */ public String getLauncherClass() { if ((m_launcherClass == null) || (m_launcherClass.length()<1)) { return C_UNKNOWN_LAUNCHER; } else { return m_launcherClass; } } /** * Returns the launcher type needed for this resource-type. * * @return the launcher type for this resource-type. 
*/ public int getLauncherType() { return m_launcherType; } /** * Returns the name for this resource-type. * * @return the name for this resource-type. */ public String getResourceTypeName() { return m_resourceTypeName; } /** * Returns the type of this resource-type. * * @return the type of this resource-type. */ public int getResourceType() { return m_resourceType; } /** * Returns a string-representation for this object. * This can be used for debugging. * * @return string-representation for this object. */ public String toString() { StringBuffer output=new StringBuffer(); output.append("[ResourceType]:"); output.append(m_resourceTypeName); output.append(" , Id="); output.append(m_resourceType); output.append(" , launcherType="); output.append(m_launcherType); output.append(" , launcherClass="); output.append(m_launcherClass); return output.toString(); } /** * Changes the group of a resource. * <br> * Only the group of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not existing in the offline project already, * it is read from the online project and written into the offline project. * <p> * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user is owner of the resource or is admin</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param newGroup the name of the new group for this resource. * @param chRekursive only used by folders. * * @exception CmsException if operation was not successful. */ public void chgrp(CmsObject cms, String filename, String newGroup, boolean chRekursive) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. 
if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ cms.doChgrp(filename, newGroup); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChgrp(bodyPath, newGroup); } } } /** * Changes the flags of a resource. * <br> * Only the flags of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not existing in the offline project already, * it is read from the online project and written into the offline project. * The user may change the flags, if he is admin of the resource. * <p> * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can write the resource</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param flags the new flags for the resource. * @param chRekursive only used by folders. * * @exception CmsException if operation was not successful. * for this resource. */ public void chmod(CmsObject cms, String filename, int flags, boolean chRekursive) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ // modify the access flags cms.doChmod(filename, flags); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ // set the internal read flag if nescessary if ((flags & C_ACCESS_INTERNAL_READ) ==0 ) { flags += C_ACCESS_INTERNAL_READ; } cms.doChmod(bodyPath, flags); } } } /** * Changes the owner of a resource. 
* <br> * Only the owner of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not existing in the offline project already, * it is read from the online project and written into the offline project. * The user may change this, if he is admin of the resource. * <p> * <B>Security:</B> * Access is cranted, if: * <ul> * <li>the user has access to the project</li> * <li>the user is owner of the resource or the user is admin</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param newOwner the name of the new owner for this resource. * @param chRekursive only used by folders. * * @exception CmsException if operation was not successful. */ public void chown(CmsObject cms, String filename, String newOwner, boolean chRekursive) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ cms.doChown(filename, newOwner); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChown(bodyPath, newOwner); } } } /** * Changes the resourcetype of a resource. * <br> * Only the resourcetype of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not exisiting in the offline project already, * it is read from the online project and written into the offline project. * The user may change this, if he is admin of the resource. 
* <p> * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user is owner of the resource or is admin</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param newType the name of the new resourcetype for this resource. * * @exception CmsException if operation was not successful. */ public void chtype(CmsObject cms, String filename, String newType) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ cms.doChtype(filename, newType); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChtype(bodyPath, newType); } } } /** * Copies a Resource. * * @param source the complete path of the sourcefile. * @param destination the complete path of the destinationfolder. * @param keepFlags <code>true</code> if the copy should keep the source file's flags, * <code>false</code> if the copy should get the user's default flags. * * @exception CmsException if the file couldn't be copied, or the user * has not the appropriate rights to copy the file. */ public void copyResource(CmsObject cms, String source, String destination, boolean keepFlags) throws CmsException{ // Read and parse the source page file CmsFile file = cms.readFile(source); CmsXmlControlFile hXml=new CmsXmlControlFile(cms, file); // Check the path of the body file. // Don't use the checkBodyPath method here to avaoid overhead. 
String bodyPath=(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")))+(source); if (bodyPath.equals(hXml.getElementTemplate("body"))){ // Evaluate some path information String destinationFolder = destination.substring(0,destination.lastIndexOf("/")+1); checkFolders(cms, destinationFolder); String newbodyPath=(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")))+ destination; // we don't want to use the changeContent method here // to avoid overhead by copying, readig, parsing, setting XML and writing again. // Instead, we re-use the already parsed XML content of the source hXml.setElementTemplate("body", newbodyPath); cms.doCopyFile(source, destination); CmsFile newPageFile = cms.readFile(destination); newPageFile.setContents(hXml.getXmlText().getBytes()); cms.writeFile(newPageFile); // Now the new page file is created. Copy the body file cms.doCopyFile(bodyPath, newbodyPath); // set access flags, if neccessary } else { // The body part of the source was not found at // the default place. Leave it there, don't make // a copy and simply make a copy of the page file. // So the new page links to the old body. cms.doCopyFile(source, destination); // set access flags, if neccessary } if(!keepFlags) { setDefaultFlags(cms, destination); } } /** * Copies a resource from the online project to a new, specified project. * <br> * Copying a resource will copy the file header or folder into the specified * offline project and set its state to UNCHANGED. * * @param resource the name of the resource. * @exception CmsException if operation was not successful. 
*/ public void copyResourceToProject(CmsObject cms, String resourceName) throws CmsException { //String resourceName = linkManager.getResourceName(resourceId); CmsFile file = cms.readFile(resourceName); cms.doCopyResourceToProject(resourceName); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doCopyResourceToProject(bodyPath); } } /** * Creates a new resource * * @param cms The CmsObject * @param folder The name of the parent folder * @param name The name of the file * @param properties The properties of the file * @param contents The file content * * @exception CmsException if operation was not successful. */ public CmsResource createResource(CmsObject cms, String folder, String name, Hashtable properties, byte[] contents) throws CmsException{ // Scan for mastertemplates Vector allMasterTemplates = cms.getFilesInFolder(C_CONTENTTEMPLATEPATH); // Select the first mastertemplate as default String masterTemplate = ""; if(allMasterTemplates.size() > 0) { masterTemplate = ((CmsFile)allMasterTemplates.elementAt(0)).getAbsolutePath(); } // Evaluate the absolute path to the new body file String bodyFolder =(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/"))) + folder; // Create the new page file CmsFile file = cms.doCreateFile(folder, name, "".getBytes(), I_CmsConstants.C_TYPE_PAGE_NAME, properties); cms.doLockResource(folder + name, true); CmsXmlControlFile pageXml = new CmsXmlControlFile(cms, file); pageXml.setTemplateClass(C_CLASSNAME); pageXml.setMasterTemplate(masterTemplate); pageXml.setElementClass("body", C_CLASSNAME); pageXml.setElementTemplate("body", bodyFolder + name); pageXml.write(); // Check, if the body path exists and create missing folders, if neccessary checkFolders(cms, folder); // Create the new body file //CmsFile bodyFile = cms.doCreateFile(bodyFolder, name, (C_DEFAULTBODY_START + new String(contents) + C_DEFAULTBODY_END).getBytes(), 
I_CmsConstants.C_TYPE_BODY_NAME, new Hashtable()); CmsFile bodyFile = cms.doCreateFile(bodyFolder, name, (C_DEFAULTBODY_START + new String(contents) + C_DEFAULTBODY_END).getBytes(), I_CmsConstants.C_TYPE_PLAIN_NAME, new Hashtable()); cms.doLockResource(bodyFolder + name, true); int flags = bodyFile.getAccessFlags(); if ((flags & C_ACCESS_INTERNAL_READ) ==0 ) { flags += C_ACCESS_INTERNAL_READ; } cms.chmod(bodyFile.getAbsolutePath(), flags); return file; } public CmsResource createResource(CmsObject cms, String folder, String name, Hashtable properties, byte[] contents, String masterTemplate) throws CmsException{ CmsFile resource = (CmsFile)createResource(cms, folder, name, properties, contents); CmsXmlControlFile pageXml = new CmsXmlControlFile(cms, resource); pageXml.setMasterTemplate(masterTemplate); pageXml.write(); return resource; } /** * Deletes a resource. * * @param filename the complete path of the file. * * @exception CmsException if the file couldn't be deleted, or if the user * has not the appropriate rights to delete the file. */ public void deleteResource(CmsObject cms, String filename) throws CmsException{ CmsFile file = cms.readFile(filename); cms.doDeleteFile(filename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doDeleteFile(bodyPath); } // The page file contains XML. // So there could be some data in the parser's cache. // Clear it! String currentProject = cms.getRequestContext().currentProject().getName(); CmsXmlControlFile.clearFileCache(currentProject + ":" + filename); } /** * Undeletes a resource. * * @param filename the complete path of the file. * * @exception CmsException if the file couldn't be undeleted, or if the user * has not the appropriate rights to undelete the file. 
*/ public void undeleteResource(CmsObject cms, String filename) throws CmsException{ CmsFile file = cms.readFile(filename); cms.doUndeleteFile(filename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doUndeleteFile(bodyPath); } // The page file contains XML. // So there could be some data in the parser's cache. // Clear it! String currentProject = cms.getRequestContext().currentProject().getName(); CmsXmlControlFile.clearFileCache(currentProject + ":" + filename); } /** * When a resource has to be exported, the IDs inside the * Linkmanagement-Tags have to be changed to the corresponding URLs * * @param file is the file that has to be changed */ public CmsFile exportResource(CmsObject cms, CmsFile file) throws CmsException { //nothing to do here, because there couldnt be any Linkmanagement-Tags in a page-file (control-file) return file; } /** * When a resource has to be imported, the URLs of the * Links inside the resources have to be saved and changed to the corresponding IDs * * @param file is the file that has to be changed */ public CmsResource importResource(CmsObject cms, String source, String destination, String type, String user, String group, String access, Hashtable properties, String launcherStartClass, byte[] content, String importPath) throws CmsException { CmsFile file = null; String path = importPath + destination.substring(0, destination.lastIndexOf("/") + 1); String name = destination.substring((destination.lastIndexOf("/") + 1), destination.length()); int state = C_STATE_NEW; // this is a file // first delete the file, so it can be overwritten try { lockResource(cms, path + name, true); deleteResource(cms, path + name); state = C_STATE_CHANGED; } catch (CmsException exc) { state = C_STATE_NEW; // ignore the exception, the file dosen't exist } // now create the file // do not use createResource because then there will the body-file be created too. 
// that would cause an exception while importing because of trying to // duplicate an entry file = (CmsFile)cms.doCreateFile(path, name, content, type, properties); String fullname = file.getAbsolutePath(); lockResource(cms, fullname, true); try{ cms.doChmod(fullname, Integer.parseInt(access)); }catch(CmsException e){ System.out.println("chmod(" + access + ") failed "); } try{ cms.doChgrp(fullname, group); }catch(CmsException e){ System.out.println("chgrp(" + group + ") failed "); } try{ cms.doChown(fullname, user); }catch(CmsException e){ System.out.println("chown((" + user + ") failed "); } if(launcherStartClass != null){ file.setLauncherClassname(launcherStartClass); cms.writeFile(file); } return file; } /** * Locks a given resource. * <br> * A user can lock a resource, so he is the only one who can write this * resource. * * @param resource the complete path to the resource to lock. * @param force if force is <code>true</code>, a existing locking will be overwritten. * * @exception CmsException if the user has not the rights to lock this resource. * It will also be thrown, if there is a existing lock and force was set to false. */ public void lockResource(CmsObject cms, String resource, boolean force) throws CmsException{ // First read the page file. CmsFile pageFile = cms.readFile(resource); CmsUser pageLocker = null; CmsUser bodyLocker = null; // Check any locks on th page file pageLocker = getLockedBy(cms, resource); CmsUser currentUser = cms.getRequestContext().currentUser(); boolean pageLockedAndSelf = pageLocker != null && currentUser.equals(pageLocker); CmsResource bodyFile = null; String bodyPath = null; // Try to fetch the body file. try { bodyPath = readBodyPath(cms, pageFile); bodyFile = cms.readFileHeader(bodyPath); } catch(Exception e) { bodyPath = null; bodyFile = null; } // first lock the page file cms.doLockResource(resource, force); if(bodyFile != null) { // Everything with the page file is ok. We have write access. XML is valid. 
// Body file could be determined and fetched. // Now check further body file details (is it locked already, WHO has locked it, etc.) bodyLocker = getLockedBy(cms, bodyPath); // Lock the body, if neccessary //if((bodyLocker == null && (pageLocker == null || pageLockedAndSelf || force)) // || (bodyLocker != null && !currentUser.equals(bodyLocker) // && !(pageLocker != null && !currentUser.equals(pageLocker) && !force))) { cms.doLockResource(bodyPath, force); //} } /* // Lock the page file, if neccessary if(!(pageLockedAndSelf && (bodyFile != null && ((bodyLocker == null) || !currentUser.equals(bodyLocker))))) { cms.doLockResource(resource, force); } */ } /** * Moves a resource to the given destination. * * @param source the complete path of the sourcefile. * @param destination the complete path of the destinationfile. * * @exception CmsException if the user has not the rights to move this resource, * or if the file couldn't be moved. */ public void moveResource(CmsObject cms, String source, String destination) throws CmsException{ CmsFile file = cms.readFile(source); //String bodyPath = readBodyPath(cms, source); String bodyPath = checkBodyPath(cms, file); //int help = C_CONTENTBODYPATH.lastIndexOf("/"); //String hbodyPath=(C_CONTENTBODYPATH.substring(0,help)) + source; //if(hbodyPath.equals(bodyPath)) { if(bodyPath != null) { //help=bodyPath.lastIndexOf("/") + 1; //hbodyPath = bodyPath.substring(0,help) + destination; //String hbodyPath = bodyPath.substring(0, bodyPath.lastIndexOf("/")) + destination; String hbodyPath = C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")) + destination; checkFolders(cms, destination.substring(0, destination.lastIndexOf("/"))); cms.doMoveFile(bodyPath, hbodyPath); changeContent(cms, source, hbodyPath); } cms.doMoveFile(source, destination); } /** * Renames the file to the new name. * * @param oldname the complete path to the file which will be renamed. * @param newname the new name of the file. 
* * @exception CmsException if the user has not the rights * to rename the file, or if the file couldn't be renamed. */ public void renameResource(CmsObject cms, String oldname, String newname) throws CmsException{ CmsFile file = cms.readFile(oldname); String bodyPath = readBodyPath(cms, file); int help = C_CONTENTBODYPATH.lastIndexOf("/"); String hbodyPath=(C_CONTENTBODYPATH.substring(0,help)) + oldname; if(hbodyPath.equals(bodyPath)) { cms.doRenameFile(bodyPath, newname); help=bodyPath.lastIndexOf("/") + 1; hbodyPath = bodyPath.substring(0,help) + newname; changeContent(cms, oldname, hbodyPath); } cms.doRenameFile(oldname,newname); } /** * Restores a file in the current project with a version in the backup * * @param cms The CmsObject * @param versionId The version id of the resource * @param filename The name of the file to restore * * @exception CmsException Throws CmsException if operation was not succesful. */ public void restoreResource(CmsObject cms, int versionId, String filename) throws CmsException{ if(!cms.accessWrite(filename)){ throw new CmsException(filename, CmsException.C_NO_ACCESS); } CmsFile file = cms.readFile(filename); cms.doRestoreResource(versionId, filename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ try{ cms.doRestoreResource(versionId, bodyPath); } catch(CmsException e){ // do not throw an exception when there is no body for this version // maybe only the control file was changed if(e.getType() == CmsException.C_NOT_FOUND){ A_OpenCms.log(A_OpenCms.C_OPENCMS_INFO,"[CmsResourceTypePage] version "+versionId+" of "+bodyPath+" not found!"); } else { throw e; } } } } /** * Undo changes in a resource. * <br> * * @param resource the complete path to the resource to be restored. * * @exception CmsException if the user has not the rights * to write this resource. 
*/ public void undoChanges(CmsObject cms, String resource) throws CmsException{ if(!cms.accessWrite(resource)){ throw new CmsException(resource, CmsException.C_NO_ACCESS); } CmsFile file = cms.readFile(resource); cms.doUndoChanges(resource); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doUndoChanges(bodyPath); } } /** * Unlocks a resource. * <br> * A user can unlock a resource, so other users may lock this file. * * @param resource the complete path to the resource to be unlocked. * * @exception CmsException if the user has not the rights * to unlock this resource. */ public void unlockResource(CmsObject cms, String resource) throws CmsException{ // First read the page file. CmsFile pageFile = cms.readFile(resource); CmsUser pageLocker = null; CmsUser bodyLocker = null; // Check any locks on th page file pageLocker = getLockedBy(cms, resource); CmsUser currentUser = cms.getRequestContext().currentUser(); CmsResource bodyFile = null; String bodyPath = null; // Try to fetch the body file. try { bodyPath = readBodyPath(cms, pageFile); bodyFile = cms.readFileHeader(bodyPath); } catch(Exception e) { bodyPath = null; bodyFile = null; } cms.doUnlockResource(resource); if(bodyFile != null) { // Everything with the page file is ok. We have write access. XML is valid. // Body file could be determined and fetched. // Now check further body file details (is it locked already, WHO has locked it, etc.) bodyLocker = getLockedBy(cms, bodyPath); // Unlock the body, if neccessary //if((pageLocker == null || pageLocker.equals(currentUser)) && (bodyLocker != null)) { cms.doUnlockResource(bodyPath); //} } // Unlock the page file, if neccessary //if(pageLocker != null || bodyLocker == null) { //cms.doUnlockResource(resource); //} } /** * method to check get the real body path from the content file * * @param cms The CmsObject, to access the XML read file. * @param file File in which the body path is stored. 
This should really * be a CmsFile object an not a file header. This won't be checked for * performance reasons. */ private String readBodyPath(CmsObject cms, CmsFile file) throws CmsException{ CmsXmlControlFile hXml=new CmsXmlControlFile(cms, file); String body = ""; try{ body = hXml.getElementTemplate("body"); } catch (CmsException exc){ // could not read body } return body; } /** * method to check get the real body path from the content file * * @param cms The CmsObject, to access the XML read file. * @param file File in which the body path is stored. */ private String checkBodyPath(CmsObject cms, CmsFile file) throws CmsException { String result =(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")))+(file.getAbsolutePath()); if (!result.equals(readBodyPath(cms, (CmsFile)file))){ result = null; } return result; } private CmsUser getLockedBy(CmsObject cms, String filename) { CmsUser result = null; try { result = cms.lockedBy(filename); if(result.getId() == -1) { result = null; } } catch(Exception e) { result = null; } return result; } /** * This method changes the path of the body file in the xml conten file * if file type name is page * * @param cms The CmsObject * @param file The XML content file * @param bodypath the new XML content entry * @exception Exception if something goes wrong. */ private void changeContent(CmsObject cms, String filename, String bodypath) throws CmsException { CmsFile file=cms.readFile(filename); CmsXmlControlFile hXml=new CmsXmlControlFile(cms, file); hXml.setElementTemplate("body", bodypath); hXml.write(); } /** * This method checks if all nescessary folders are exisitng in the content body * folder and creates the missing ones. <br> * All page contents files are stored in the content body folder in a mirrored directory * structure of the OpenCms filesystem. Therefor it is nescessary to create the * missing folders when a new page document is createg. 
* @param cms The CmsObject * @param path The path in the CmsFilesystem where the new page should be created. * @exception CmsException if something goes wrong. */ private void checkFolders(CmsObject cms, String path) throws CmsException { String completePath=C_CONTENTBODYPATH; StringTokenizer t=new StringTokenizer(path,"/"); String correspFolder = "/"; // check if all folders are there while (t.hasMoreTokens()) { String foldername=t.nextToken(); correspFolder = correspFolder+foldername+"/"; try { // try to read the folder. if this fails, an exception is thrown cms.readFolder(completePath+foldername+"/"); } catch (CmsException e) { // the folder could not be read, so create it. String orgFolder=completePath+foldername+"/"; orgFolder=orgFolder.substring(C_CONTENTBODYPATH.length()-1); CmsFolder newfolder=cms.doCreateFolder(completePath,foldername); CmsFolder folder=cms.readFolder(orgFolder); cms.doLockResource(newfolder.getAbsolutePath(),false); cms.doChgrp(newfolder.getAbsolutePath(),cms.readGroup(folder).getName()); cms.doChmod(newfolder.getAbsolutePath(),folder.getAccessFlags()); cms.doChown(newfolder.getAbsolutePath(),cms.readOwner(folder).getName()); try{ CmsFolder correspondingFolder = cms.readFolder(correspFolder); if(!correspondingFolder.isLocked()){ cms.doUnlockResource(newfolder.getAbsolutePath()); } } catch (CmsException ex){ // unable to unlock folder if parent folder is locked } } completePath+=foldername+"/"; } } /** * Set the access flags of the copied resource to the default values. * @param cms The CmsObject. * @param filename The name of the file. * @exception Throws CmsException if something goes wrong. 
*/ private void setDefaultFlags(CmsObject cms, String filename) throws CmsException { Hashtable startSettings=null; Integer accessFlags=null; startSettings=(Hashtable)cms.getRequestContext().currentUser().getAdditionalInfo(C_ADDITIONAL_INFO_STARTSETTINGS); if (startSettings != null) { accessFlags=(Integer)startSettings.get(C_START_ACCESSFLAGS); } if (accessFlags == null) { accessFlags = new Integer(C_ACCESS_DEFAULT_FLAGS); } chmod(cms, filename, accessFlags.intValue(), false); } /** * Changes the project-id of the resource to the new project * for publishing the resource directly * * @param newProjectId The Id of the new project * @param resourcename The name of the resource to change */ public void changeLockedInProject(CmsObject cms, int newProjectId, String resourcename) throws CmsException{ CmsFile file = cms.readFile(resourcename); cms.doChangeLockedInProject(newProjectId, resourcename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChangeLockedInProject(newProjectId, bodyPath); } // The page file contains XML. // So there could be some data in the parser's cache. // Clear it! String currentProject = cms.getRequestContext().currentProject().getName(); CmsXmlControlFile.clearFileCache(currentProject + ":" + resourcename); } }
src/com/opencms/file/CmsResourceTypePage.java
/* * File : $Source: /alkacon/cvs/opencms/src/com/opencms/file/Attic/CmsResourceTypePage.java,v $ * Date : $Date: 2001/08/16 09:49:02 $ * Version: $Revision: 1.17 $ * * This library is part of OpenCms - * the Open Source Content Mananagement System * * Copyright (C) 2001 The OpenCms Group * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * For further information about OpenCms, please see the * OpenCms Website: http://www.opencms.org * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.opencms.file; import java.util.zip.*; import com.opencms.core.*; import com.opencms.template.*; import com.opencms.util.*; import java.util.*; import java.io.*; import org.w3c.dom.*; import com.opencms.file.genericSql.*; //import com.opencms.file.genericSql.linkmanagement.*; /** * Access class for resources of the type "Page". 
* * @author Alexander Lucas * @version $Revision: 1.17 $ $Date: 2001/08/16 09:49:02 $ */ public class CmsResourceTypePage implements I_CmsResourceType, Serializable, I_CmsConstants, com.opencms.workplace.I_CmsWpConstants { /** Definition of the class */ private final static String C_CLASSNAME="com.opencms.template.CmsXmlTemplate"; private static final String C_DEFAULTBODY_START = "<?xml version=\"1.0\"?>\n<XMLTEMPLATE>\n<TEMPLATE>\n<![CDATA[\n"; private static final String C_DEFAULTBODY_END = "]]></TEMPLATE>\n</XMLTEMPLATE>"; /** * The id of resource type. */ private int m_resourceType; /** * The id of the launcher used by this resource. */ private int m_launcherType; /** * The resource type name. */ private String m_resourceTypeName; /** * The class name of the Java class launched by the launcher. */ private String m_launcherClass; /** * inits a new CmsResourceType object. * * @param resourceType The id of the resource type. * @param launcherType The id of the required launcher. * @param resourceTypeName The printable name of the resource type. * @param launcherClass The Java class that should be invoked by the launcher. * This value is <b> null </b> if the default invokation class should be used. */ public void init(int resourceType, int launcherType, String resourceTypeName, String launcherClass){ m_resourceType=resourceType; m_launcherType=launcherType; m_resourceTypeName=resourceTypeName; m_launcherClass=launcherClass; } /** * Returns the name of the Java class loaded by the launcher. * This method returns <b>null</b> if the default class for this type is used. * * @return the name of the Java class. */ public String getLauncherClass() { if ((m_launcherClass == null) || (m_launcherClass.length()<1)) { return C_UNKNOWN_LAUNCHER; } else { return m_launcherClass; } } /** * Returns the launcher type needed for this resource-type. * * @return the launcher type for this resource-type. 
*/ public int getLauncherType() { return m_launcherType; } /** * Returns the name for this resource-type. * * @return the name for this resource-type. */ public String getResourceTypeName() { return m_resourceTypeName; } /** * Returns the type of this resource-type. * * @return the type of this resource-type. */ public int getResourceType() { return m_resourceType; } /** * Returns a string-representation for this object. * This can be used for debugging. * * @return string-representation for this object. */ public String toString() { StringBuffer output=new StringBuffer(); output.append("[ResourceType]:"); output.append(m_resourceTypeName); output.append(" , Id="); output.append(m_resourceType); output.append(" , launcherType="); output.append(m_launcherType); output.append(" , launcherClass="); output.append(m_launcherClass); return output.toString(); } /** * Changes the group of a resource. * <br> * Only the group of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not existing in the offline project already, * it is read from the online project and written into the offline project. * <p> * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user is owner of the resource or is admin</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param newGroup the name of the new group for this resource. * @param chRekursive only used by folders. * * @exception CmsException if operation was not successful. */ public void chgrp(CmsObject cms, String filename, String newGroup, boolean chRekursive) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. 
if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ cms.doChgrp(filename, newGroup); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChgrp(bodyPath, newGroup); } } } /** * Changes the flags of a resource. * <br> * Only the flags of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not existing in the offline project already, * it is read from the online project and written into the offline project. * The user may change the flags, if he is admin of the resource. * <p> * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can write the resource</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param flags the new flags for the resource. * @param chRekursive only used by folders. * * @exception CmsException if operation was not successful. * for this resource. */ public void chmod(CmsObject cms, String filename, int flags, boolean chRekursive) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ // modify the access flags cms.doChmod(filename, flags); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ // set the internal read flag if nescessary if ((flags & C_ACCESS_INTERNAL_READ) ==0 ) { flags += C_ACCESS_INTERNAL_READ; } cms.doChmod(bodyPath, flags); } } } /** * Changes the owner of a resource. 
* <br> * Only the owner of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not existing in the offline project already, * it is read from the online project and written into the offline project. * The user may change this, if he is admin of the resource. * <p> * <B>Security:</B> * Access is cranted, if: * <ul> * <li>the user has access to the project</li> * <li>the user is owner of the resource or the user is admin</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param newOwner the name of the new owner for this resource. * @param chRekursive only used by folders. * * @exception CmsException if operation was not successful. */ public void chown(CmsObject cms, String filename, String newOwner, boolean chRekursive) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ cms.doChown(filename, newOwner); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChown(bodyPath, newOwner); } } } /** * Changes the resourcetype of a resource. * <br> * Only the resourcetype of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not exisiting in the offline project already, * it is read from the online project and written into the offline project. * The user may change this, if he is admin of the resource. 
* <p> * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user is owner of the resource or is admin</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param filename the complete path to the resource. * @param newType the name of the new resourcetype for this resource. * * @exception CmsException if operation was not successful. */ public void chtype(CmsObject cms, String filename, String newType) throws CmsException{ CmsFile file = cms.readFile(filename); // check if the current user has the right to change the group of the // resource. Only the owner of a file and the admin are allowed to do this. if ((cms.getRequestContext().currentUser().equals(cms.readOwner(file))) || (cms.userInGroup(cms.getRequestContext().currentUser().getName(), C_GROUP_ADMIN))){ cms.doChtype(filename, newType); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChtype(bodyPath, newType); } } } /** * Copies a Resource. * * @param source the complete path of the sourcefile. * @param destination the complete path of the destinationfolder. * @param keepFlags <code>true</code> if the copy should keep the source file's flags, * <code>false</code> if the copy should get the user's default flags. * * @exception CmsException if the file couldn't be copied, or the user * has not the appropriate rights to copy the file. */ public void copyResource(CmsObject cms, String source, String destination, boolean keepFlags) throws CmsException{ // Read and parse the source page file CmsFile file = cms.readFile(source); CmsXmlControlFile hXml=new CmsXmlControlFile(cms, file); // Check the path of the body file. // Don't use the checkBodyPath method here to avaoid overhead. 
String bodyPath=(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")))+(source); if (bodyPath.equals(hXml.getElementTemplate("body"))){ // Evaluate some path information String destinationFolder = destination.substring(0,destination.lastIndexOf("/")+1); checkFolders(cms, destinationFolder); String newbodyPath=(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")))+ destination; // we don't want to use the changeContent method here // to avoid overhead by copying, readig, parsing, setting XML and writing again. // Instead, we re-use the already parsed XML content of the source hXml.setElementTemplate("body", newbodyPath); cms.doCopyFile(source, destination); CmsFile newPageFile = cms.readFile(destination); newPageFile.setContents(hXml.getXmlText().getBytes()); cms.writeFile(newPageFile); // Now the new page file is created. Copy the body file cms.doCopyFile(bodyPath, newbodyPath); // set access flags, if neccessary } else { // The body part of the source was not found at // the default place. Leave it there, don't make // a copy and simply make a copy of the page file. // So the new page links to the old body. cms.doCopyFile(source, destination); // set access flags, if neccessary } if(!keepFlags) { setDefaultFlags(cms, destination); } } /** * Copies a resource from the online project to a new, specified project. * <br> * Copying a resource will copy the file header or folder into the specified * offline project and set its state to UNCHANGED. * * @param resource the name of the resource. * @exception CmsException if operation was not successful. 
*/ public void copyResourceToProject(CmsObject cms, String resourceName) throws CmsException { //String resourceName = linkManager.getResourceName(resourceId); CmsFile file = cms.readFile(resourceName); cms.doCopyResourceToProject(resourceName); //check if the file type name is page String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doCopyResourceToProject(bodyPath); } } /** * Creates a new resource * * @param cms The CmsObject * @param folder The name of the parent folder * @param name The name of the file * @param properties The properties of the file * @param contents The file content * * @exception CmsException if operation was not successful. */ public CmsResource createResource(CmsObject cms, String folder, String name, Hashtable properties, byte[] contents) throws CmsException{ // Scan for mastertemplates Vector allMasterTemplates = cms.getFilesInFolder(C_CONTENTTEMPLATEPATH); // Select the first mastertemplate as default String masterTemplate = ""; if(allMasterTemplates.size() > 0) { masterTemplate = ((CmsFile)allMasterTemplates.elementAt(0)).getAbsolutePath(); } // Evaluate the absolute path to the new body file String bodyFolder =(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/"))) + folder; // Create the new page file CmsFile file = cms.doCreateFile(folder, name, "".getBytes(), I_CmsConstants.C_TYPE_PAGE_NAME, properties); cms.doLockResource(folder + name, true); CmsXmlControlFile pageXml = new CmsXmlControlFile(cms, file); pageXml.setTemplateClass(C_CLASSNAME); pageXml.setMasterTemplate(masterTemplate); pageXml.setElementClass("body", C_CLASSNAME); pageXml.setElementTemplate("body", bodyFolder + name); pageXml.write(); // Check, if the body path exists and create missing folders, if neccessary checkFolders(cms, folder); // Create the new body file //CmsFile bodyFile = cms.doCreateFile(bodyFolder, name, (C_DEFAULTBODY_START + new String(contents) + C_DEFAULTBODY_END).getBytes(), 
I_CmsConstants.C_TYPE_BODY_NAME, new Hashtable()); CmsFile bodyFile = cms.doCreateFile(bodyFolder, name, (C_DEFAULTBODY_START + new String(contents) + C_DEFAULTBODY_END).getBytes(), I_CmsConstants.C_TYPE_PLAIN_NAME, new Hashtable()); cms.doLockResource(bodyFolder + name, true); int flags = bodyFile.getAccessFlags(); if ((flags & C_ACCESS_INTERNAL_READ) ==0 ) { flags += C_ACCESS_INTERNAL_READ; } cms.chmod(bodyFile.getAbsolutePath(), flags); return file; } public CmsResource createResource(CmsObject cms, String folder, String name, Hashtable properties, byte[] contents, String masterTemplate) throws CmsException{ CmsFile resource = (CmsFile)createResource(cms, folder, name, properties, contents); CmsXmlControlFile pageXml = new CmsXmlControlFile(cms, resource); pageXml.setMasterTemplate(masterTemplate); pageXml.write(); return resource; } /** * Deletes a resource. * * @param filename the complete path of the file. * * @exception CmsException if the file couldn't be deleted, or if the user * has not the appropriate rights to delete the file. */ public void deleteResource(CmsObject cms, String filename) throws CmsException{ CmsFile file = cms.readFile(filename); cms.doDeleteFile(filename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doDeleteFile(bodyPath); } // The page file contains XML. // So there could be some data in the parser's cache. // Clear it! String currentProject = cms.getRequestContext().currentProject().getName(); CmsXmlControlFile.clearFileCache(currentProject + ":" + filename); } /** * Undeletes a resource. * * @param filename the complete path of the file. * * @exception CmsException if the file couldn't be undeleted, or if the user * has not the appropriate rights to undelete the file. 
*/ public void undeleteResource(CmsObject cms, String filename) throws CmsException{ CmsFile file = cms.readFile(filename); cms.doUndeleteFile(filename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doUndeleteFile(bodyPath); } // The page file contains XML. // So there could be some data in the parser's cache. // Clear it! String currentProject = cms.getRequestContext().currentProject().getName(); CmsXmlControlFile.clearFileCache(currentProject + ":" + filename); } /** * When a resource has to be exported, the IDs inside the * Linkmanagement-Tags have to be changed to the corresponding URLs * * @param file is the file that has to be changed */ public CmsFile exportResource(CmsObject cms, CmsFile file) throws CmsException { //nothing to do here, because there couldnt be any Linkmanagement-Tags in a page-file (control-file) return file; } /** * When a resource has to be imported, the URLs of the * Links inside the resources have to be saved and changed to the corresponding IDs * * @param file is the file that has to be changed */ public CmsResource importResource(CmsObject cms, String source, String destination, String type, String user, String group, String access, Hashtable properties, String launcherStartClass, byte[] content, String importPath) throws CmsException { CmsFile file = null; String path = importPath + destination.substring(0, destination.lastIndexOf("/") + 1); String name = destination.substring((destination.lastIndexOf("/") + 1), destination.length()); int state = C_STATE_NEW; // this is a file // first delete the file, so it can be overwritten try { lockResource(cms, path + name, true); deleteResource(cms, path + name); state = C_STATE_CHANGED; } catch (CmsException exc) { state = C_STATE_NEW; // ignore the exception, the file dosen't exist } // now create the file // do not use createResource because then there will the body-file be created too. 
// that would cause an exception while importing because of trying to // duplicate an entry file = (CmsFile)cms.doCreateFile(path, name, content, type, properties); String fullname = file.getAbsolutePath(); lockResource(cms, fullname, true); try{ cms.doChmod(fullname, Integer.parseInt(access)); }catch(CmsException e){ System.out.println("chmod(" + access + ") failed "); } try{ cms.doChgrp(fullname, group); }catch(CmsException e){ System.out.println("chgrp(" + group + ") failed "); } try{ cms.doChown(fullname, user); }catch(CmsException e){ System.out.println("chown((" + user + ") failed "); } if(launcherStartClass != null){ file.setLauncherClassname(launcherStartClass); cms.writeFile(file); } return file; } /** * Locks a given resource. * <br> * A user can lock a resource, so he is the only one who can write this * resource. * * @param resource the complete path to the resource to lock. * @param force if force is <code>true</code>, a existing locking will be overwritten. * * @exception CmsException if the user has not the rights to lock this resource. * It will also be thrown, if there is a existing lock and force was set to false. */ public void lockResource(CmsObject cms, String resource, boolean force) throws CmsException{ // First read the page file. CmsFile pageFile = cms.readFile(resource); CmsUser pageLocker = null; CmsUser bodyLocker = null; // Check any locks on th page file pageLocker = getLockedBy(cms, resource); CmsUser currentUser = cms.getRequestContext().currentUser(); boolean pageLockedAndSelf = pageLocker != null && currentUser.equals(pageLocker); CmsResource bodyFile = null; String bodyPath = null; // Try to fetch the body file. try { bodyPath = readBodyPath(cms, pageFile); bodyFile = cms.readFileHeader(bodyPath); } catch(Exception e) { bodyPath = null; bodyFile = null; } // first lock the page file cms.doLockResource(resource, force); if(bodyFile != null) { // Everything with the page file is ok. We have write access. XML is valid. 
// Body file could be determined and fetched. // Now check further body file details (is it locked already, WHO has locked it, etc.) bodyLocker = getLockedBy(cms, bodyPath); // Lock the body, if neccessary //if((bodyLocker == null && (pageLocker == null || pageLockedAndSelf || force)) // || (bodyLocker != null && !currentUser.equals(bodyLocker) // && !(pageLocker != null && !currentUser.equals(pageLocker) && !force))) { cms.doLockResource(bodyPath, force); //} } /* // Lock the page file, if neccessary if(!(pageLockedAndSelf && (bodyFile != null && ((bodyLocker == null) || !currentUser.equals(bodyLocker))))) { cms.doLockResource(resource, force); } */ } /** * Moves a resource to the given destination. * * @param source the complete path of the sourcefile. * @param destination the complete path of the destinationfile. * * @exception CmsException if the user has not the rights to move this resource, * or if the file couldn't be moved. */ public void moveResource(CmsObject cms, String source, String destination) throws CmsException{ CmsFile file = cms.readFile(source); //String bodyPath = readBodyPath(cms, source); String bodyPath = checkBodyPath(cms, file); //int help = C_CONTENTBODYPATH.lastIndexOf("/"); //String hbodyPath=(C_CONTENTBODYPATH.substring(0,help)) + source; //if(hbodyPath.equals(bodyPath)) { if(bodyPath != null) { //help=bodyPath.lastIndexOf("/") + 1; //hbodyPath = bodyPath.substring(0,help) + destination; //String hbodyPath = bodyPath.substring(0, bodyPath.lastIndexOf("/")) + destination; String hbodyPath = C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")) + destination; checkFolders(cms, destination.substring(0, destination.lastIndexOf("/"))); cms.doMoveFile(bodyPath, hbodyPath); changeContent(cms, source, hbodyPath); } cms.doMoveFile(source, destination); } /** * Renames the file to the new name. * * @param oldname the complete path to the file which will be renamed. * @param newname the new name of the file. 
* * @exception CmsException if the user has not the rights * to rename the file, or if the file couldn't be renamed. */ public void renameResource(CmsObject cms, String oldname, String newname) throws CmsException{ CmsFile file = cms.readFile(oldname); String bodyPath = readBodyPath(cms, file); int help = C_CONTENTBODYPATH.lastIndexOf("/"); String hbodyPath=(C_CONTENTBODYPATH.substring(0,help)) + oldname; if(hbodyPath.equals(bodyPath)) { cms.doRenameFile(bodyPath, newname); help=bodyPath.lastIndexOf("/") + 1; hbodyPath = bodyPath.substring(0,help) + newname; changeContent(cms, oldname, hbodyPath); } cms.doRenameFile(oldname,newname); } /** * Restores a file in the current project with a version in the backup * * @param cms The CmsObject * @param versionId The version id of the resource * @param filename The name of the file to restore * * @exception CmsException Throws CmsException if operation was not succesful. */ public void restoreResource(CmsObject cms, int versionId, String filename) throws CmsException{ if(!cms.accessWrite(filename)){ throw new CmsException(filename, CmsException.C_NO_ACCESS); } CmsFile file = cms.readFile(filename); cms.doRestoreResource(versionId, filename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doRestoreResource(versionId, bodyPath); } } /** * Undo changes in a resource. * <br> * * @param resource the complete path to the resource to be restored. * * @exception CmsException if the user has not the rights * to write this resource. */ public void undoChanges(CmsObject cms, String resource) throws CmsException{ if(!cms.accessWrite(resource)){ throw new CmsException(resource, CmsException.C_NO_ACCESS); } CmsFile file = cms.readFile(resource); cms.doUndoChanges(resource); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doUndoChanges(bodyPath); } } /** * Unlocks a resource. * <br> * A user can unlock a resource, so other users may lock this file. 
* * @param resource the complete path to the resource to be unlocked. * * @exception CmsException if the user has not the rights * to unlock this resource. */ public void unlockResource(CmsObject cms, String resource) throws CmsException{ // First read the page file. CmsFile pageFile = cms.readFile(resource); CmsUser pageLocker = null; CmsUser bodyLocker = null; // Check any locks on th page file pageLocker = getLockedBy(cms, resource); CmsUser currentUser = cms.getRequestContext().currentUser(); CmsResource bodyFile = null; String bodyPath = null; // Try to fetch the body file. try { bodyPath = readBodyPath(cms, pageFile); bodyFile = cms.readFileHeader(bodyPath); } catch(Exception e) { bodyPath = null; bodyFile = null; } cms.doUnlockResource(resource); if(bodyFile != null) { // Everything with the page file is ok. We have write access. XML is valid. // Body file could be determined and fetched. // Now check further body file details (is it locked already, WHO has locked it, etc.) bodyLocker = getLockedBy(cms, bodyPath); // Unlock the body, if neccessary //if((pageLocker == null || pageLocker.equals(currentUser)) && (bodyLocker != null)) { cms.doUnlockResource(bodyPath); //} } // Unlock the page file, if neccessary //if(pageLocker != null || bodyLocker == null) { //cms.doUnlockResource(resource); //} } /** * method to check get the real body path from the content file * * @param cms The CmsObject, to access the XML read file. * @param file File in which the body path is stored. This should really * be a CmsFile object an not a file header. This won't be checked for * performance reasons. 
*/ private String readBodyPath(CmsObject cms, CmsFile file) throws CmsException{ CmsXmlControlFile hXml=new CmsXmlControlFile(cms, file); String body = ""; try{ body = hXml.getElementTemplate("body"); } catch (CmsException exc){ // could not read body } return body; } /** * method to check get the real body path from the content file * * @param cms The CmsObject, to access the XML read file. * @param file File in which the body path is stored. */ private String checkBodyPath(CmsObject cms, CmsFile file) throws CmsException { String result =(C_CONTENTBODYPATH.substring(0, C_CONTENTBODYPATH.lastIndexOf("/")))+(file.getAbsolutePath()); if (!result.equals(readBodyPath(cms, (CmsFile)file))){ result = null; } return result; } private CmsUser getLockedBy(CmsObject cms, String filename) { CmsUser result = null; try { result = cms.lockedBy(filename); if(result.getId() == -1) { result = null; } } catch(Exception e) { result = null; } return result; } /** * This method changes the path of the body file in the xml conten file * if file type name is page * * @param cms The CmsObject * @param file The XML content file * @param bodypath the new XML content entry * @exception Exception if something goes wrong. */ private void changeContent(CmsObject cms, String filename, String bodypath) throws CmsException { CmsFile file=cms.readFile(filename); CmsXmlControlFile hXml=new CmsXmlControlFile(cms, file); hXml.setElementTemplate("body", bodypath); hXml.write(); } /** * This method checks if all nescessary folders are exisitng in the content body * folder and creates the missing ones. <br> * All page contents files are stored in the content body folder in a mirrored directory * structure of the OpenCms filesystem. Therefor it is nescessary to create the * missing folders when a new page document is createg. * @param cms The CmsObject * @param path The path in the CmsFilesystem where the new page should be created. * @exception CmsException if something goes wrong. 
*/ private void checkFolders(CmsObject cms, String path) throws CmsException { String completePath=C_CONTENTBODYPATH; StringTokenizer t=new StringTokenizer(path,"/"); String correspFolder = "/"; // check if all folders are there while (t.hasMoreTokens()) { String foldername=t.nextToken(); correspFolder = correspFolder+foldername+"/"; try { // try to read the folder. if this fails, an exception is thrown cms.readFolder(completePath+foldername+"/"); } catch (CmsException e) { // the folder could not be read, so create it. String orgFolder=completePath+foldername+"/"; orgFolder=orgFolder.substring(C_CONTENTBODYPATH.length()-1); CmsFolder newfolder=cms.doCreateFolder(completePath,foldername); CmsFolder folder=cms.readFolder(orgFolder); cms.doLockResource(newfolder.getAbsolutePath(),false); cms.doChgrp(newfolder.getAbsolutePath(),cms.readGroup(folder).getName()); cms.doChmod(newfolder.getAbsolutePath(),folder.getAccessFlags()); cms.doChown(newfolder.getAbsolutePath(),cms.readOwner(folder).getName()); try{ CmsFolder correspondingFolder = cms.readFolder(correspFolder); if(!correspondingFolder.isLocked()){ cms.doUnlockResource(newfolder.getAbsolutePath()); } } catch (CmsException ex){ // unable to unlock folder if parent folder is locked } } completePath+=foldername+"/"; } } /** * Set the access flags of the copied resource to the default values. * @param cms The CmsObject. * @param filename The name of the file. * @exception Throws CmsException if something goes wrong. 
*/ private void setDefaultFlags(CmsObject cms, String filename) throws CmsException { Hashtable startSettings=null; Integer accessFlags=null; startSettings=(Hashtable)cms.getRequestContext().currentUser().getAdditionalInfo(C_ADDITIONAL_INFO_STARTSETTINGS); if (startSettings != null) { accessFlags=(Integer)startSettings.get(C_START_ACCESSFLAGS); } if (accessFlags == null) { accessFlags = new Integer(C_ACCESS_DEFAULT_FLAGS); } chmod(cms, filename, accessFlags.intValue(), false); } /** * Changes the project-id of the resource to the new project * for publishing the resource directly * * @param newProjectId The Id of the new project * @param resourcename The name of the resource to change */ public void changeLockedInProject(CmsObject cms, int newProjectId, String resourcename) throws CmsException{ CmsFile file = cms.readFile(resourcename); cms.doChangeLockedInProject(newProjectId, resourcename); String bodyPath = checkBodyPath(cms, (CmsFile)file); if (bodyPath != null){ cms.doChangeLockedInProject(newProjectId, bodyPath); } // The page file contains XML. // So there could be some data in the parser's cache. // Clear it! String currentProject = cms.getRequestContext().currentProject().getName(); CmsXmlControlFile.clearFileCache(currentProject + ":" + resourcename); } }
Bugfix: when restore a page do not throw an exception if the body file could not be deleted.
src/com/opencms/file/CmsResourceTypePage.java
Bugfix: when restore a page do not throw an exception if the body file could not be deleted.
Java
apache-2.0
5d575db14294b9af17c58c8a985e9a95d7fa97a4
0
SerCeMan/intellij-community,fnouama/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,izonder/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,samthor/intellij-community,kdwink/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,petteyg/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,holmes/intellij-community,petteyg/intellij-community,retomerz/intellij-community,xfournet/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,ibinti/intellij-community,Lekanich/intellij-
community,Lekanich/intellij-community,apixandru/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,consulo/consulo,amith01994/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,ibinti/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,tmpgit/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,ryano144/intellij-community,holmes/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,diorcety/intellij-community,joewalnes/idea-community,clumsy/intellij-community,caot/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,caot/intellij-community,hurricup/intellij-community,retomerz/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,joewalnes/idea-community,petteyg/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,clumsy/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,consulo/consulo,tmpgit/intellij-community,Lekanich/intellij-community,signed/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,ol-loginov/intellij-communi
ty,mglukhikh/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,samthor/intellij-community,FHannes/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,da1z/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,retomerz/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,samthor/intellij-community,fitermay/intellij-community,fnouama/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,signed/intellij-community,kdwink/intellij-community,allotria/intellij-community,asedunov/intellij-community,xfournet/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,ernestp/consulo,amith01994/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,caot/intellij-community,clumsy/intellij-community,apixandru/intellij-community,izonder/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,kool79/intellij-community,fitermay/intellij-community,fitermay/intellij-community,amith01994/intellij-community,dslomov/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,ibinti/intellij-community,semonte/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,asedunov/intellij-community,holmes/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,slisson/intellij-community,kool79/intellij-community,jo
ewalnes/idea-community,fengbaicanhe/intellij-community,fnouama/intellij-community,semonte/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,supersven/intellij-community,da1z/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,ernestp/consulo,wreckJ/intellij-community,kool79/intellij-community,FHannes/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,allotria/intellij-community,supersven/intellij-community,joewalnes/idea-community,adedayo/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,ryano144/intellij-community,blademainer/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,holmes/intellij-community,holmes/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,
MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,izonder/intellij-community,clumsy/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,caot/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,izonder/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,da1z/intellij-community,slisson/intellij-community,FHannes/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,signed/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,ibinti/intellij-community,da1z/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,ernestp/consulo,signed/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,fnouama/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,clumsy/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,retomerz/intellij-community,retomerz/intellij-community,fitermay/intellij-c
ommunity,caot/intellij-community,allotria/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,diorcety/intellij-community,allotria/intellij-community,ernestp/consulo,pwoodworth/intellij-community,petteyg/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,joewalnes/idea-community,tmpgit/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,asedunov/intellij-community,asedunov/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,vladmm/intellij-community,petteyg/intellij-community,samthor/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,samthor/intellij-community,xfournet/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,blademainer/intelli
j-community,slisson/intellij-community,adedayo/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,allotria/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,dslomov/intellij-community,vladmm/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,slisson/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,kdwink/intellij-community,diorcety/intellij-community,fitermay/intellij-community,joewalnes/idea-community,fnouama/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,signed/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,youdonghai/intellij-community,allotria/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,joewalnes/idea-community,ibinti/intellij-community,semonte/intellij-community,ryano144/intellij-community,caot/intellij-community,asedunov/intellij-community,dslomov/intellij-community,ibinti/intellij-community,suncycheng/intellij-co
mmunity,FHannes/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,caot/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,da1z/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,consulo/consulo,adedayo/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,da1z/intellij-community,signed/intellij-community,allotria/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,signed/intellij-community,caot/intellij-community,samthor/intellij-community,da1z/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,kdwink/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,amith01994/intellij-community,TangH
ao1987/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,kdwink/intellij-community,blademainer/intellij-community,apixandru/intellij-community,xfournet/intellij-community,adedayo/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,ernestp/consulo,wreckJ/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,adedayo/intellij-community,vladmm/intellij-community,fnouama/intellij-community,consulo/consulo,fitermay/intellij-community,signed/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,wreckJ/intellij-community,samthor/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,kool79/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,consulo/consulo,ryano144/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,
diorcety/intellij-community,holmes/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,semonte/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,supersven/intellij-community,akosyakov/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,fnouama/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,apixandru/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,caot/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,adedayo/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,kool79/intellij-community,diorcety/intellij-community,kool79/intellij-community,dslomov/intellij-community,asedunov/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,izonder/intellij-community,allotria/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,blademainer/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,signed/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,retomerz/intellij-community,izonder/i
ntellij-community,slisson/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,samthor/intellij-community,signed/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,asedunov/intellij-community,apixandru/intellij-community,robovm/robovm-studio,holmes/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,clumsy/intellij-community,semonte/intellij-community,slisson/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,signed/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,FHannes/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,petteyg/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,nicolargo/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.rename; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileTypes.FileTypes; import com.intellij.openapi.help.HelpManager; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.rename.naming.AutomaticRenamerFactory; import com.intellij.refactoring.ui.NameSuggestionsField; import com.intellij.refactoring.ui.RefactoringDialog; import com.intellij.refactoring.util.TextOccurrencesUtil; import com.intellij.ui.IdeBorderFactory; import com.intellij.ui.NonFocusableCheckBox; import com.intellij.usageView.UsageViewUtil; import com.intellij.util.ArrayUtil; import com.intellij.xml.util.XmlTagUtilBase; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; 
import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.util.*; public class RenameDialog extends RefactoringDialog { private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.rename.RenameDialog"); private SuggestedNameInfo mySuggestedNameInfo; private JLabel myNameLabel; private NameSuggestionsField myNameSuggestionsField; private JCheckBox myCbSearchInComments; private JCheckBox myCbSearchTextOccurences; private final JLabel myNewNamePrefix = new JLabel(""); private final String myHelpID; private final PsiElement myPsiElement; private final PsiElement myNameSuggestionContext; private final Editor myEditor; private static final String REFACTORING_NAME = RefactoringBundle.message("rename.title"); private NameSuggestionsField.DataChanged myNameChangedListener; private final Map<AutomaticRenamerFactory, JCheckBox> myAutomaticRenamers = new HashMap<AutomaticRenamerFactory, JCheckBox>(); public RenameDialog(@NotNull Project project, @NotNull PsiElement psiElement, @Nullable PsiElement nameSuggestionContext, Editor editor) { super(project, true); assert psiElement.isValid(); myPsiElement = psiElement; myNameSuggestionContext = nameSuggestionContext; myEditor = editor; setTitle(REFACTORING_NAME); createNewNameComponent(); init(); myNameLabel.setText("<html>" + XmlTagUtilBase.escapeString(RefactoringBundle.message("rename.0.and.its.usages.to", getFullName()), false) + "</html>"); boolean toSearchInComments = isToSearchInCommentsForRename(); myCbSearchInComments.setSelected(toSearchInComments); if (myCbSearchTextOccurences.isEnabled()) { boolean toSearchForTextOccurences = isToSearchForTextOccurencesForRename(); myCbSearchTextOccurences.setSelected(toSearchForTextOccurences); } validateButtons(); myHelpID = RenamePsiElementProcessor.forElement(psiElement).getHelpID(psiElement); } protected void dispose() { myNameSuggestionsField.removeDataChangedListener(myNameChangedListener); super.dispose(); } protected boolean 
isToSearchForTextOccurencesForRename() { return RenamePsiElementProcessor.forElement(myPsiElement).isToSearchForTextOccurrences(myPsiElement); } protected boolean isToSearchInCommentsForRename() { return RenamePsiElementProcessor.forElement(myPsiElement).isToSearchInComments(myPsiElement); } private String getFullName() { final String name = UsageViewUtil.getDescriptiveName(myPsiElement); return (UsageViewUtil.getType(myPsiElement) + " " + name).trim(); } private void createNewNameComponent() { String[] suggestedNames = getSuggestedNames(); myNameSuggestionsField = new NameSuggestionsField(suggestedNames, myProject, FileTypes.PLAIN_TEXT, myEditor); if (myPsiElement instanceof PsiFile && myEditor == null) { myNameSuggestionsField.selectNameWithoutExtension(); } myNameChangedListener = new NameSuggestionsField.DataChanged() { public void dataChanged() { validateButtons(); } }; myNameSuggestionsField.addDataChangedListener(myNameChangedListener); myNameSuggestionsField.getComponent().registerKeyboardAction(new ActionListener() { public void actionPerformed(ActionEvent e) { completeVariable(myNameSuggestionsField.getEditor()); } }, KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, InputEvent.CTRL_MASK), JComponent.WHEN_IN_FOCUSED_WINDOW); } public String[] getSuggestedNames() { LinkedHashSet<String> result = new LinkedHashSet<String>(); final NameSuggestionProvider[] providers = Extensions.getExtensions(NameSuggestionProvider.EP_NAME); for(NameSuggestionProvider provider: providers) { SuggestedNameInfo info = provider.getSuggestedNames(myPsiElement, myNameSuggestionContext, result); if (info != null) mySuggestedNameInfo = info; } if (result.size() == 0) { result.add(UsageViewUtil.getShortName(myPsiElement)); } return ArrayUtil.toStringArray(result); } private void completeVariable(Editor editor) { final String prefix = myNameSuggestionsField.getEnteredName(); Collection<LookupElement> items = null; for(NameSuggestionProvider provider: 
Extensions.getExtensions(NameSuggestionProvider.EP_NAME)) { items = provider.completeName(myPsiElement, myNameSuggestionContext, prefix); if (items != null) break; } if (items != null) { final LookupElement[] lookupItems = items.toArray(new LookupElement[items.size()]); editor.getCaretModel().moveToOffset(prefix.length()); editor.getSelectionModel().removeSelection(); LookupManager.getInstance(myProject).showLookup(editor, lookupItems, prefix); } } public String getNewName() { return myNameSuggestionsField.getEnteredName().trim(); } public boolean isSearchInComments() { return myCbSearchInComments.isSelected(); } public boolean isSearchInNonJavaFiles() { return myCbSearchTextOccurences.isSelected(); } public JComponent getPreferredFocusedComponent() { return myNameSuggestionsField.getFocusableComponent(); } protected JComponent createCenterPanel() { return null; } protected JComponent createNorthPanel() { JPanel panel = new JPanel(new GridBagLayout()); GridBagConstraints gbConstraints = new GridBagConstraints(); panel.setBorder(IdeBorderFactory.createBorder()); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.weighty = 0; gbConstraints.weightx = 1; gbConstraints.gridwidth = GridBagConstraints.REMAINDER; gbConstraints.fill = GridBagConstraints.BOTH; myNameLabel = new JLabel(); panel.add(myNameLabel, gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, "".equals(myNewNamePrefix.getText()) ? 
0 : 1); gbConstraints.gridwidth = 1; gbConstraints.fill = GridBagConstraints.NONE; gbConstraints.weightx = 0; gbConstraints.gridx = 0; gbConstraints.anchor = GridBagConstraints.WEST; panel.add(myNewNamePrefix, gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = 2; gbConstraints.fill = GridBagConstraints.BOTH; gbConstraints.weightx = 1; gbConstraints.gridx = 0; gbConstraints.weighty = 1; panel.add(myNameSuggestionsField.getComponent(), gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = 1; gbConstraints.gridx = 0; gbConstraints.weighty = 0; gbConstraints.weightx = 1; gbConstraints.fill = GridBagConstraints.BOTH; myCbSearchInComments = new NonFocusableCheckBox(); myCbSearchInComments.setText(RefactoringBundle.getSearchInCommentsAndStringsText()); myCbSearchInComments.setSelected(true); panel.add(myCbSearchInComments, gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = GridBagConstraints.REMAINDER; gbConstraints.gridx = 1; gbConstraints.weightx = 1; gbConstraints.fill = GridBagConstraints.BOTH; myCbSearchTextOccurences = new NonFocusableCheckBox(); myCbSearchTextOccurences.setText(RefactoringBundle.getSearchForTextOccurrencesText()); myCbSearchTextOccurences.setSelected(true); panel.add(myCbSearchTextOccurences, gbConstraints); if (!TextOccurrencesUtil.isSearchTextOccurencesEnabled(myPsiElement)) { myCbSearchTextOccurences.setEnabled(false); myCbSearchTextOccurences.setSelected(false); myCbSearchTextOccurences.setVisible(false); } for(AutomaticRenamerFactory factory: Extensions.getExtensions(AutomaticRenamerFactory.EP_NAME)) { if (factory.isApplicable(myPsiElement) && factory.getOptionName() != null) { gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = myAutomaticRenamers.size() % 2 == 0 ? 
1 : GridBagConstraints.REMAINDER; gbConstraints.gridx = myAutomaticRenamers.size() % 2; gbConstraints.weightx = 1; gbConstraints.fill = GridBagConstraints.BOTH; JCheckBox checkBox = new NonFocusableCheckBox(); checkBox.setText(factory.getOptionName()); checkBox.setSelected(factory.isEnabled()); panel.add(checkBox, gbConstraints); myAutomaticRenamers.put(factory, checkBox); } } return panel; } protected void doHelpAction() { HelpManager.getInstance().invokeHelp(myHelpID); } protected void doAction() { LOG.assertTrue(myPsiElement.isValid()); final String newName = getNewName(); performRename(newName); } @TestOnly public void performRename(final String newName) { final RenamePsiElementProcessor elementProcessor = RenamePsiElementProcessor.forElement(myPsiElement); elementProcessor.setToSearchInComments(myPsiElement, isSearchInComments()); if (myCbSearchTextOccurences.isEnabled()) { elementProcessor.setToSearchForTextOccurrences(myPsiElement, isSearchInNonJavaFiles()); } if (mySuggestedNameInfo != null) { mySuggestedNameInfo.nameChoosen(newName); } final RenameProcessor processor = new RenameProcessor(getProject(), myPsiElement, newName, isSearchInComments(), isSearchInNonJavaFiles()); for(Map.Entry<AutomaticRenamerFactory, JCheckBox> e: myAutomaticRenamers.entrySet()) { e.getKey().setEnabled(e.getValue().isSelected()); if (e.getValue().isSelected()) { processor.addRenamerFactory(e.getKey()); } } invokeRefactoring(processor); } @Override protected void canRun() throws ConfigurationException { if (!areButtonsValid()) { throw new ConfigurationException("\'" + StringUtil.first(getNewName(), 10, true) + "\' is invalid identifier"); } } protected boolean areButtonsValid() { final String newName = getNewName(); return RenameUtil.isValidName(myProject, myPsiElement, newName); } }
platform/lang-impl/src/com/intellij/refactoring/rename/RenameDialog.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.rename; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileTypes.FileTypes; import com.intellij.openapi.help.HelpManager; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.rename.naming.AutomaticRenamerFactory; import com.intellij.refactoring.ui.NameSuggestionsField; import com.intellij.refactoring.ui.RefactoringDialog; import com.intellij.refactoring.util.TextOccurrencesUtil; import com.intellij.ui.IdeBorderFactory; import com.intellij.ui.NonFocusableCheckBox; import com.intellij.usageView.UsageViewUtil; import com.intellij.util.ArrayUtil; import com.intellij.xml.util.XmlTagUtilBase; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; 
import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.util.*; public class RenameDialog extends RefactoringDialog { private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.rename.RenameDialog"); private SuggestedNameInfo mySuggestedNameInfo; private JLabel myNameLabel; private NameSuggestionsField myNameSuggestionsField; private JCheckBox myCbSearchInComments; private JCheckBox myCbSearchTextOccurences; private final JLabel myNewNamePrefix = new JLabel(""); private final String myHelpID; private final PsiElement myPsiElement; private final PsiElement myNameSuggestionContext; private final Editor myEditor; private static final String REFACTORING_NAME = RefactoringBundle.message("rename.title"); private NameSuggestionsField.DataChanged myNameChangedListener; private final Map<AutomaticRenamerFactory, JCheckBox> myAutomaticRenamers = new HashMap<AutomaticRenamerFactory, JCheckBox>(); public RenameDialog(@NotNull Project project, @NotNull PsiElement psiElement, @Nullable PsiElement nameSuggestionContext, Editor editor) { super(project, true); assert psiElement.isValid(); myPsiElement = psiElement; myNameSuggestionContext = nameSuggestionContext; myEditor = editor; setTitle(REFACTORING_NAME); createNewNameComponent(); init(); myNameLabel.setText("<html>" + XmlTagUtilBase.escapeString(RefactoringBundle.message("rename.0.and.its.usages.to", getFullName()), false) + "</html>"); boolean toSearchInComments = isToSearchInCommentsForRename(); myCbSearchInComments.setSelected(toSearchInComments); if (myCbSearchTextOccurences.isEnabled()) { boolean toSearchForTextOccurences = isToSearchForTextOccurencesForRename(); myCbSearchTextOccurences.setSelected(toSearchForTextOccurences); } validateButtons(); myHelpID = RenamePsiElementProcessor.forElement(psiElement).getHelpID(psiElement); } protected void dispose() { myNameSuggestionsField.removeDataChangedListener(myNameChangedListener); super.dispose(); } protected boolean 
isToSearchForTextOccurencesForRename() { return RenamePsiElementProcessor.forElement(myPsiElement).isToSearchForTextOccurrences(myPsiElement); } protected boolean isToSearchInCommentsForRename() { return RenamePsiElementProcessor.forElement(myPsiElement).isToSearchInComments(myPsiElement); } private String getFullName() { final String name = UsageViewUtil.getDescriptiveName(myPsiElement); return (UsageViewUtil.getType(myPsiElement) + " " + name).trim(); } private void createNewNameComponent() { String[] suggestedNames = getSuggestedNames(); myNameSuggestionsField = new NameSuggestionsField(suggestedNames, myProject, FileTypes.PLAIN_TEXT, myEditor); if (myPsiElement instanceof PsiFile && myEditor == null) { myNameSuggestionsField.selectNameWithoutExtension(); } myNameChangedListener = new NameSuggestionsField.DataChanged() { public void dataChanged() { validateButtons(); } }; myNameSuggestionsField.addDataChangedListener(myNameChangedListener); myNameSuggestionsField.getComponent().registerKeyboardAction(new ActionListener() { public void actionPerformed(ActionEvent e) { completeVariable(myNameSuggestionsField.getEditor()); } }, KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, InputEvent.CTRL_MASK), JComponent.WHEN_IN_FOCUSED_WINDOW); } public String[] getSuggestedNames() { Set<String> result = new HashSet<String>(); final NameSuggestionProvider[] providers = Extensions.getExtensions(NameSuggestionProvider.EP_NAME); for(NameSuggestionProvider provider: providers) { SuggestedNameInfo info = provider.getSuggestedNames(myPsiElement, myNameSuggestionContext, result); if (info != null) mySuggestedNameInfo = info; } if (result.size() == 0) { result.add(UsageViewUtil.getShortName(myPsiElement)); } return ArrayUtil.toStringArray(result); } private void completeVariable(Editor editor) { final String prefix = myNameSuggestionsField.getEnteredName(); Collection<LookupElement> items = null; for(NameSuggestionProvider provider: Extensions.getExtensions(NameSuggestionProvider.EP_NAME)) 
{ items = provider.completeName(myPsiElement, myNameSuggestionContext, prefix); if (items != null) break; } if (items != null) { final LookupElement[] lookupItems = items.toArray(new LookupElement[items.size()]); editor.getCaretModel().moveToOffset(prefix.length()); editor.getSelectionModel().removeSelection(); LookupManager.getInstance(myProject).showLookup(editor, lookupItems, prefix); } } public String getNewName() { return myNameSuggestionsField.getEnteredName().trim(); } public boolean isSearchInComments() { return myCbSearchInComments.isSelected(); } public boolean isSearchInNonJavaFiles() { return myCbSearchTextOccurences.isSelected(); } public JComponent getPreferredFocusedComponent() { return myNameSuggestionsField.getFocusableComponent(); } protected JComponent createCenterPanel() { return null; } protected JComponent createNorthPanel() { JPanel panel = new JPanel(new GridBagLayout()); GridBagConstraints gbConstraints = new GridBagConstraints(); panel.setBorder(IdeBorderFactory.createBorder()); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.weighty = 0; gbConstraints.weightx = 1; gbConstraints.gridwidth = GridBagConstraints.REMAINDER; gbConstraints.fill = GridBagConstraints.BOTH; myNameLabel = new JLabel(); panel.add(myNameLabel, gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, "".equals(myNewNamePrefix.getText()) ? 
0 : 1); gbConstraints.gridwidth = 1; gbConstraints.fill = GridBagConstraints.NONE; gbConstraints.weightx = 0; gbConstraints.gridx = 0; gbConstraints.anchor = GridBagConstraints.WEST; panel.add(myNewNamePrefix, gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = 2; gbConstraints.fill = GridBagConstraints.BOTH; gbConstraints.weightx = 1; gbConstraints.gridx = 0; gbConstraints.weighty = 1; panel.add(myNameSuggestionsField.getComponent(), gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = 1; gbConstraints.gridx = 0; gbConstraints.weighty = 0; gbConstraints.weightx = 1; gbConstraints.fill = GridBagConstraints.BOTH; myCbSearchInComments = new NonFocusableCheckBox(); myCbSearchInComments.setText(RefactoringBundle.getSearchInCommentsAndStringsText()); myCbSearchInComments.setSelected(true); panel.add(myCbSearchInComments, gbConstraints); gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = GridBagConstraints.REMAINDER; gbConstraints.gridx = 1; gbConstraints.weightx = 1; gbConstraints.fill = GridBagConstraints.BOTH; myCbSearchTextOccurences = new NonFocusableCheckBox(); myCbSearchTextOccurences.setText(RefactoringBundle.getSearchForTextOccurrencesText()); myCbSearchTextOccurences.setSelected(true); panel.add(myCbSearchTextOccurences, gbConstraints); if (!TextOccurrencesUtil.isSearchTextOccurencesEnabled(myPsiElement)) { myCbSearchTextOccurences.setEnabled(false); myCbSearchTextOccurences.setSelected(false); myCbSearchTextOccurences.setVisible(false); } for(AutomaticRenamerFactory factory: Extensions.getExtensions(AutomaticRenamerFactory.EP_NAME)) { if (factory.isApplicable(myPsiElement) && factory.getOptionName() != null) { gbConstraints.insets = new Insets(4, 8, 4, 8); gbConstraints.gridwidth = myAutomaticRenamers.size() % 2 == 0 ? 
1 : GridBagConstraints.REMAINDER; gbConstraints.gridx = myAutomaticRenamers.size() % 2; gbConstraints.weightx = 1; gbConstraints.fill = GridBagConstraints.BOTH; JCheckBox checkBox = new NonFocusableCheckBox(); checkBox.setText(factory.getOptionName()); checkBox.setSelected(factory.isEnabled()); panel.add(checkBox, gbConstraints); myAutomaticRenamers.put(factory, checkBox); } } return panel; } protected void doHelpAction() { HelpManager.getInstance().invokeHelp(myHelpID); } protected void doAction() { LOG.assertTrue(myPsiElement.isValid()); final String newName = getNewName(); performRename(newName); } @TestOnly public void performRename(final String newName) { final RenamePsiElementProcessor elementProcessor = RenamePsiElementProcessor.forElement(myPsiElement); elementProcessor.setToSearchInComments(myPsiElement, isSearchInComments()); if (myCbSearchTextOccurences.isEnabled()) { elementProcessor.setToSearchForTextOccurrences(myPsiElement, isSearchInNonJavaFiles()); } if (mySuggestedNameInfo != null) { mySuggestedNameInfo.nameChoosen(newName); } final RenameProcessor processor = new RenameProcessor(getProject(), myPsiElement, newName, isSearchInComments(), isSearchInNonJavaFiles()); for(Map.Entry<AutomaticRenamerFactory, JCheckBox> e: myAutomaticRenamers.entrySet()) { e.getKey().setEnabled(e.getValue().isSelected()); if (e.getValue().isSelected()) { processor.addRenamerFactory(e.getKey()); } } invokeRefactoring(processor); } @Override protected void canRun() throws ConfigurationException { if (!areButtonsValid()) { throw new ConfigurationException("\'" + StringUtil.first(getNewName(), 10, true) + "\' is invalid identifier"); } } protected boolean areButtonsValid() { final String newName = getNewName(); return RenameUtil.isValidName(myProject, myPsiElement, newName); } }
preserve suggestions order
platform/lang-impl/src/com/intellij/refactoring/rename/RenameDialog.java
preserve suggestions order
Java
apache-2.0
35e542d66a7044f8c42dd71f3aa88f6ea8166c28
0
NeoGridBR/bopepo
/* * Copyright 2008 JRimum Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by * applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS * OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. * * Created at: 30/03/2008 - 18:11:45 * * ================================================================================ * * Direitos autorais 2008 JRimum Project * * Licenciado sob a Licença Apache, Versão 2.0 ("LICENÇA"); você não pode usar * esse arquivo exceto em conformidade com a esta LICENÇA. Você pode obter uma * cópia desta LICENÇA em http://www.apache.org/licenses/LICENSE-2.0 A menos que * haja exigência legal ou acordo por escrito, a distribuição de software sob * esta LICENÇA se dará “COMO ESTÁ”, SEM GARANTIAS OU CONDIÇÕES DE QUALQUER * TIPO, sejam expressas ou tácitas. Veja a LICENÇA para a redação específica a * reger permissões e limitações sob esta LICENÇA. 
* * Criado em: 30/03/2008 - 18:11:45 * */ package org.jrimum.bopepo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.awt.Image; import java.math.BigDecimal; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import javax.imageio.ImageIO; import org.jrimum.bopepo.campolivre.CampoLivre; import org.jrimum.bopepo.campolivre.NotSupportedBancoException; import org.jrimum.bopepo.campolivre.NotSupportedCampoLivreException; import org.jrimum.domkee.financeiro.banco.febraban.Agencia; import org.jrimum.domkee.financeiro.banco.febraban.Carteira; import org.jrimum.domkee.financeiro.banco.febraban.Cedente; import org.jrimum.domkee.financeiro.banco.febraban.ContaBancaria; import org.jrimum.domkee.financeiro.banco.febraban.NumeroDaConta; import org.jrimum.domkee.financeiro.banco.febraban.Sacado; import org.jrimum.domkee.financeiro.banco.febraban.TipoDeMoeda; import org.jrimum.domkee.financeiro.banco.febraban.Titulo; import org.jrimum.utilix.ClassLoaders; import org.junit.Before; import org.junit.Test; /** * <p> * Teste da classe Boleto * </p> * * @author <a href="http://gilmatryx.googlepages.com/">Gilmar P.S.L.</a> * @author <a href="mailto:[email protected]">Misael Barreto</a> * @author <a href="mailto:[email protected]">Rômulo Augusto</a> * * @since 0.2 * * @version 0.2 */ public class TestBoleto{ private Titulo titulo; private Date VENCIMENTO = new GregorianCalendar(2000, Calendar.JULY, 3).getTime(); private Boleto boleto; @Before public void setUp() throws Exception { Sacado sacado = new Sacado("Sacado"); Cedente cedente = new Cedente("Cedente"); ContaBancaria contaBancaria = new ContaBancaria(); contaBancaria.setBanco(BancosSuportados.BANCO_BRADESCO.create()); Agencia agencia = new Agencia(1234, "1"); contaBancaria.setAgencia(agencia); 
contaBancaria.setCarteira(new Carteira(5)); NumeroDaConta numeroDaConta = new NumeroDaConta(); numeroDaConta.setCodigoDaConta(6789); contaBancaria.setNumeroDaConta(numeroDaConta); titulo = new Titulo(contaBancaria, sacado, cedente); titulo.setNossoNumero("12345678901"); titulo.setTipoDeMoeda(TipoDeMoeda.REAL); titulo.setValor(BigDecimal.valueOf(100.23)); titulo.setDataDoVencimento(VENCIMENTO); boleto = new Boleto(titulo); } /** * Test method for {@link org.jrimum.bopepo.Boleto#Boleto(Titulo)}. * @throws NotSupportedBancoException * @throws NotSupportedCampoLivreException */ @Test public void testGetInstance() throws NotSupportedBancoException, NotSupportedCampoLivreException { assertNotNull(boleto); assertNotNull(boleto.getTitulo()); try{ new Boleto(null); assertTrue(false); fail("Teste Falho!"); } catch(IllegalArgumentException illegalArgumentException){ assertTrue(true); } } /** * Test method for {@link org.jrimum.bopepo.Boleto#getCodigoDeBarras()}. */ @Test public void testGetCodigoDeBarra() { assertNotNull(boleto.getCodigoDeBarras()); } /** * Test method for {@link org.jrimum.bopepo.Boleto#getLinhaDigitavel()}. */ @Test public void testGetLinhaDigitavel() { assertNotNull(boleto.getLinhaDigitavel()); } /** * Test method for {@link org.jrimum.bopepo.Boleto#getDataDeProcessamento()}. 
*/ @Test public void testGetDataDeProcessamento() { DateFormat df = new SimpleDateFormat("dd/MM/yyyy"); Date agora = new Date(); assertEquals(df.format(agora), df.format(boleto.getDataDeProcessamento())); } @Test(expected = IllegalArgumentException.class) public void testSetCampoLivreNull() { boleto = new Boleto(titulo, null); } @SuppressWarnings("serial") @Test public void testSetCampoLivreTamanhoCorreto() { boleto = new Boleto(titulo, new CampoLivre() { public String write() { return "1234567890123456789012345"; } public void read(String g) { } }); assertNotNull(boleto.getCampoLivre()); assertNotNull(boleto.getCampoLivre().write()); assertEquals(CampoLivre.STRING_LENGTH.intValue(), boleto.getCampoLivre().write().length()); } @Test(expected = IllegalArgumentException.class) public void testSetCampoLivreTamanhoMaior() { boleto = new Boleto(titulo, new CampoLivre() { private static final long serialVersionUID = 1L; public String write() { return "1234567890123456789012345000"; } public void read(String g) { } }); } @Test(expected = IllegalArgumentException.class) public void testSetCampoLivreTamanhoMenor() { boleto = new Boleto(titulo, new CampoLivre() { private static final long serialVersionUID = 1L; public String write() { return "12345678901234567890"; } public void read(String g) { } }); } //TODO /* @Test public void deve_sobrescrever_os_campos_texto_padrao_do_boleto() throws Exception { final String campoCendente = "txtFcCedente"; final String conteudoOriginal = "Banco dos Desenvolvedores"; final String conteudoSobrescrito = "Banco JRimum"; boleto.addTextosExtras(campoCendente, conteudoOriginal); assertEquals(boleto.getTextosExtras().get(campoCendente), conteudoOriginal); boleto.sobrescrevaCampo(BoletoCampo.txtFcCedente, conteudoSobrescrito); assertEquals(boleto.getTextosExtras().get(campoCendente), conteudoSobrescrito); } */ @Test public void deve_adicionar_campos_texto_ao_boleto() throws Exception { final String campo = "meuCampo"; final String conteudo = 
"Meu conteudo especial!"; boleto.addTextosExtras(campo, conteudo); assertEquals(boleto.getTextosExtras().get(campo), conteudo); } @Test public void deve_adicionar_campos_imagem_ao_boleto() throws Exception { final String campo = "meuCampo"; final Image conteudo = ImageIO.read(ClassLoaders.getResource("img/001.png")); assertNotNull(conteudo); boleto.addImagensExtras(campo, conteudo); assertEquals(boleto.getImagensExtras().get(campo), conteudo); } }
src/test/java/org/jrimum/bopepo/TestBoleto.java
/* * Copyright 2008 JRimum Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by * applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS * OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. * * Created at: 30/03/2008 - 18:11:45 * * ================================================================================ * * Direitos autorais 2008 JRimum Project * * Licenciado sob a Licença Apache, Versão 2.0 ("LICENÇA"); você não pode usar * esse arquivo exceto em conformidade com a esta LICENÇA. Você pode obter uma * cópia desta LICENÇA em http://www.apache.org/licenses/LICENSE-2.0 A menos que * haja exigência legal ou acordo por escrito, a distribuição de software sob * esta LICENÇA se dará “COMO ESTÁ”, SEM GARANTIAS OU CONDIÇÕES DE QUALQUER * TIPO, sejam expressas ou tácitas. Veja a LICENÇA para a redação específica a * reger permissões e limitações sob esta LICENÇA. 
* * Criado em: 30/03/2008 - 18:11:45 * */ package org.jrimum.bopepo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.awt.Image; import java.math.BigDecimal; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import javax.imageio.ImageIO; import org.jrimum.bopepo.campolivre.CampoLivre; import org.jrimum.bopepo.campolivre.NotSupportedBancoException; import org.jrimum.bopepo.campolivre.NotSupportedCampoLivreException; import org.jrimum.domkee.financeiro.banco.febraban.Agencia; import org.jrimum.domkee.financeiro.banco.febraban.Carteira; import org.jrimum.domkee.financeiro.banco.febraban.Cedente; import org.jrimum.domkee.financeiro.banco.febraban.ContaBancaria; import org.jrimum.domkee.financeiro.banco.febraban.NumeroDaConta; import org.jrimum.domkee.financeiro.banco.febraban.Sacado; import org.jrimum.domkee.financeiro.banco.febraban.TipoDeMoeda; import org.jrimum.domkee.financeiro.banco.febraban.Titulo; import org.jrimum.utilix.ClassLoaders; import org.junit.Before; import org.junit.Test; /** * <p> * Teste da classe Boleto * </p> * * @author <a href="http://gilmatryx.googlepages.com/">Gilmar P.S.L.</a> * @author <a href="mailto:[email protected]">Misael Barreto</a> * @author <a href="mailto:[email protected]">Rômulo Augusto</a> * * @since 0.2 * * @version 0.2 */ public class TestBoleto{ private Titulo titulo; private Date VENCIMENTO = new GregorianCalendar(2000, Calendar.JULY, 3).getTime(); private Boleto boleto; @Before public void setUp() throws Exception { Sacado sacado = new Sacado("Sacado"); Cedente cedente = new Cedente("Cedente"); ContaBancaria contaBancaria = new ContaBancaria(); contaBancaria.setBanco(BancosSuportados.BANCO_BRADESCO.create()); Agencia agencia = new Agencia(1234, "1"); contaBancaria.setAgencia(agencia); 
contaBancaria.setCarteira(new Carteira(5)); NumeroDaConta numeroDaConta = new NumeroDaConta(); numeroDaConta.setCodigoDaConta(6789); contaBancaria.setNumeroDaConta(numeroDaConta); titulo = new Titulo(contaBancaria, sacado, cedente); titulo.setNossoNumero("12345678901"); titulo.setTipoDeMoeda(TipoDeMoeda.REAL); titulo.setValor(BigDecimal.valueOf(100.23)); titulo.setDataDoVencimento(VENCIMENTO); boleto = new Boleto(titulo); } /** * Test method for {@link org.jrimum.bopepo.Boleto#Boleto(Titulo)}. * @throws NotSupportedBancoException * @throws NotSupportedCampoLivreException */ @Test public void testGetInstance() throws NotSupportedBancoException, NotSupportedCampoLivreException { assertNotNull(boleto); assertNotNull(boleto.getTitulo()); try{ new Boleto(null); assertTrue(false); fail("Teste Falho!"); } catch(IllegalArgumentException illegalArgumentException){ assertTrue(true); } } /** * Test method for {@link org.jrimum.bopepo.Boleto#getCodigoDeBarras()}. */ @Test public void testGetCodigoDeBarra() { assertNotNull(boleto.getCodigoDeBarras()); } /** * Test method for {@link org.jrimum.bopepo.Boleto#getLinhaDigitavel()}. */ @Test public void testGetLinhaDigitavel() { assertNotNull(boleto.getLinhaDigitavel()); } /** * Test method for {@link org.jrimum.bopepo.Boleto#getDataDeProcessamento()}. 
*/ @Test public void testGetDataDeProcessamento() { DateFormat df = new SimpleDateFormat("dd/MM/yyyy"); Date agora = new Date(); assertEquals(df.format(agora), df.format(boleto.getDataDeProcessamento())); } @Test(expected = IllegalArgumentException.class) public void testSetCampoLivreNull() { boleto = new Boleto(titulo, null); } @SuppressWarnings("serial") @Test public void testSetCampoLivreTamanhoCorreto() { boleto = new Boleto(titulo, new CampoLivre() { public String write() { return "1234567890123456789012345"; } public void read(String g) { } }); assertNotNull(boleto.getCampoLivre()); assertNotNull(boleto.getCampoLivre().write()); assertEquals(CampoLivre.STRING_LENGTH.intValue(), boleto.getCampoLivre().write().length()); } @Test(expected = IllegalArgumentException.class) public void testSetCampoLivreTamanhoMaior() { boleto = new Boleto(titulo, new CampoLivre() { private static final long serialVersionUID = 1L; public String write() { return "1234567890123456789012345000"; } public void read(String g) { } }); } @Test(expected = IllegalArgumentException.class) public void testSetCampoLivreTamanhoMenor() { boleto = new Boleto(titulo, new CampoLivre() { private static final long serialVersionUID = 1L; public String write() { return "12345678901234567890"; } public void read(String g) { } }); } @Test public void deve_sobrescrever_os_campos_texto_padrao_do_boleto() throws Exception { final String campoCendente = "txtFcCedente"; final String conteudoOriginal = "Banco dos Desenvolvedores"; final String conteudoSobrescrito = "Banco JRimum"; boleto.addTextosExtras(campoCendente, conteudoOriginal); assertEquals(boleto.getTextosExtras().get(campoCendente), conteudoOriginal); //TODO boleto.sobrescrevaCampo(BoletoCampo.txtFcCedente, conteudoSobrescrito); assertEquals(boleto.getTextosExtras().get(campoCendente), conteudoSobrescrito); } @Test public void deve_adicionar_campos_texto_ao_boleto() throws Exception { final String campo = "meuCampo"; final String conteudo = "Meu 
conteudo especial!"; boleto.addTextosExtras(campo, conteudo); assertEquals(boleto.getTextosExtras().get(campo), conteudo); } @Test public void deve_adicionar_campos_imagem_ao_boleto() throws Exception { final String campo = "meuCampo"; final Image conteudo = ImageIO.read(ClassLoaders.getResource("img/001.png")); assertNotNull(conteudo); boleto.addImagensExtras(campo, conteudo); assertEquals(boleto.getImagensExtras().get(campo), conteudo); } }
Ajuste de teste devido a remoção dos códigos de view.
src/test/java/org/jrimum/bopepo/TestBoleto.java
Ajuste de teste devido a remoção dos códigos de view.
Java
apache-2.0
1513bd73417c79c3e4cd22a6bafa4b17519ee37d
0
holmes/intellij-community,izonder/intellij-community,robovm/robovm-studio,holmes/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,petteyg/intellij-community,ryano144/intellij-community,samthor/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,xfournet/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,ibinti/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,slisson/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,vladmm/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,samthor/intellij-community,holmes/intellij-community,orekyuu/intellij-community,allotria/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-communi
ty,hurricup/intellij-community,fitermay/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,signed/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,FHannes/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,hurricup/intellij-community,holmes/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,jagguli/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,apixandru/intellij-community,vladmm/intellij-community,robovm/robovm-studio,retomerz/intellij-community,fnouama/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,caot/intellij-community,kool79/intellij-community,caot/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,blademainer/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,kdwink/intellij-community,supersven/intellij
-community,fitermay/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,semonte/intellij-community,ryano144/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,semonte/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,allotria/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,samthor/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,fitermay/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,holmes/intellij-community,blademainer/intellij-community,diorcety/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,fitermay/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community
,slisson/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,izonder/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,izonder/intellij-community,clumsy/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,da1z/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,diorcety/intellij-community,caot/intellij-community,ryano144/intellij-community,diorcety/intellij-community,fitermay/intellij-community,supersven/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,slisson/intellij-community,allotria/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,supersven/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,blademainer/intellij-co
mmunity,Distrotech/intellij-community,amith01994/intellij-community,semonte/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,nicolargo/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,supersven/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,semonte/intellij-community,petteyg/intellij-community,samthor/intellij-community,wreckJ/intellij-community,slisson/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,semonte/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,dslomov/intellij-community,slisson/intellij-community,slisson/intellij-community,caot/intellij-community,ol-loginov/intellij-community,
signed/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,suncycheng/intellij-community,da1z/intellij-community,clumsy/intellij-community,amith01994/intellij-community,asedunov/intellij-community,retomerz/intellij-community,caot/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,amith01994/intellij-community,petteyg/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,da1z/intellij-community,supersven/intellij-community,signed/intellij-community,FHannes/intellij-community,dslomov/intellij-community,asedunov/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,caot/intellij-community,da1z/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-c
ommunity,apixandru/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,ibinti/intellij-community,FHannes/intellij-community,robovm/robovm-studio,izonder/intellij-community,semonte/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,semonte/intellij-community,caot/intellij-community,allotria/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,signed/intellij-community,petteyg/intellij-community,jagguli/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,FHannes/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,dslomov/intellij-community,holmes/intellij-community,akosyakov/intellij-community,kool79/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,holmes/intellij-community,da1z/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,hurricup/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,samthor/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,supersven/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,retomerz/intellij-community,da1z/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,ftomassetti/intellij-community,youdongha
i/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,da1z/intellij-community,robovm/robovm-studio,da1z/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,petteyg/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,holmes/intellij-community,semonte/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,petteyg/intellij-community,hurricup/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,allotria/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,xfournet/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,adedayo/intellij-community,diorcety/intellij-community,caot/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,vladmm/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,allotri
a/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,fitermay/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,kool79/intellij-community,apixandru/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,supersven/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,asedunov/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,semonte/intellij-community,signed/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,kool79/intellij-community,fitermay/i
ntellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,jagguli/intellij-community,asedunov/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,izonder/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,dslomov/intellij-community,amith01994/intellij-community,vladmm/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,samthor/intellij-community,diorcety/intellij-community,amith01994/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,vladmm/intellij-community,kool79/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,caot/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,hurricup/intellij-community,s
uncycheng/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,youdonghai/intellij-community,signed/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,signed/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,samthor/intellij-community,orekyuu/intellij-community,semonte/intellij-community,petteyg/intellij-community,asedunov/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community
package com.jetbrains.python.newProject.actions; import com.intellij.facet.ui.ValidationResult; import com.intellij.ide.impl.ProjectUtil; import com.intellij.ide.util.projectWizard.WebProjectTemplate; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.actionSystem.impl.ActionButtonWithText; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.ui.TextFieldWithBrowseButton; import com.intellij.openapi.ui.ValidationInfo; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.wm.impl.welcomeScreen.AbstractActionWithPanel; import com.intellij.platform.DirectoryProjectGenerator; import com.intellij.platform.WebProjectGenerator; import com.intellij.ui.DocumentAdapter; import com.intellij.ui.JBColor; import com.intellij.ui.components.JBCheckBox; import com.intellij.util.NullableConsumer; import com.intellij.util.ui.CenteredIcon; import com.jetbrains.python.PythonSdkChooserCombo; import com.jetbrains.python.configuration.PyConfigurableInterpreterList; import com.jetbrains.python.configuration.VirtualEnvProjectFilter; import com.jetbrains.python.newProject.PyFrameworkProjectGenerator; import com.jetbrains.python.packaging.PyExternalProcessException; import com.jetbrains.python.packaging.PyPackage; import com.jetbrains.python.packaging.PyPackageManager; import com.jetbrains.python.packaging.PyPackageManagerImpl; import com.jetbrains.python.sdk.PythonSdkType; import com.jetbrains.python.sdk.flavors.JythonSdkFlavor; import com.jetbrains.python.sdk.flavors.PyPySdkFlavor; import 
com.jetbrains.python.sdk.flavors.PythonSdkFlavor; import icons.PythonIcons; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.EmptyBorder; import javax.swing.event.DocumentEvent; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; import java.util.List; abstract public class AbstractProjectSettingsStep extends AbstractActionWithPanel implements DumbAware { protected final DirectoryProjectGenerator myProjectGenerator; private final NullableConsumer<AbstractProjectSettingsStep> myCallback; private PythonSdkChooserCombo mySdkCombo; private JCheckBox myFrameworkCheckbox; private boolean myInstallFrameworkChanged; private TextFieldWithBrowseButton myLocationField; protected final File myProjectDirectory; private Button myCreateButton; private JLabel myErrorLabel; private AnAction myCreateAction; public AbstractProjectSettingsStep(DirectoryProjectGenerator projectGenerator, NullableConsumer<AbstractProjectSettingsStep> callback) { super(); myProjectGenerator = projectGenerator; myCallback = callback; myProjectDirectory = FileUtil.findSequentNonexistentFile(new File(ProjectUtil.getBaseDir()), "untitled", ""); if (myProjectGenerator instanceof WebProjectTemplate) { ((WebProjectTemplate)myProjectGenerator).getPeer().addSettingsStateListener(new WebProjectGenerator.SettingsStateListener() { @Override public void stateChanged(boolean validSettings) { checkValid(); if (validSettings) { setErrorText(null); } } }); } myCreateAction = new AnAction("Create ", "Create Project", getIcon()) { @Override public void actionPerformed(AnActionEvent e) { boolean isOk; try { isOk = checkValid(); if (myProjectGenerator instanceof WebProjectTemplate) { final ValidationInfo validationInfo = ((WebProjectTemplate)myProjectGenerator).getPeer().validate(); isOk = validationInfo == null; if (!isOk) { 
setErrorText(validationInfo.message); } } } catch (RuntimeException e1) { isOk = false; setErrorText(e1.getMessage()); } if (isOk && myCallback != null) myCallback.consume(AbstractProjectSettingsStep.this); } }; } @Override public void actionPerformed(AnActionEvent e) { } @Override public JPanel createPanel() { final JPanel mainPanel = new JPanel(new BorderLayout()); mainPanel.setPreferredSize(new Dimension(mainPanel.getPreferredSize().width, 450)); final JPanel panel = createBasePanel(); mainPanel.add(panel, BorderLayout.NORTH); final JPanel advancedSettings = createAdvancedSettings(); if (advancedSettings != null) mainPanel.add(advancedSettings, BorderLayout.CENTER); final JPanel bottomPanel = new JPanel(new BorderLayout()); myCreateButton = new Button(myCreateAction, myCreateAction.getTemplatePresentation()); bottomPanel.setPreferredSize(new Dimension(mainPanel.getPreferredSize().width, 40)); myCreateButton.setPreferredSize(new Dimension(mainPanel.getPreferredSize().width, 40)); myErrorLabel = new JLabel(""); myErrorLabel.setForeground(JBColor.RED); bottomPanel.add(myErrorLabel, BorderLayout.WEST); bottomPanel.add(myCreateButton, BorderLayout.CENTER); mainPanel.add(bottomPanel, BorderLayout.SOUTH); return mainPanel; } protected Icon getIcon() { return myProjectGenerator.getLogo(); } private JPanel createBasePanel() { final JPanel panel = new JPanel(new GridBagLayout()); final GridBagConstraints c = new GridBagConstraints(); c.fill = GridBagConstraints.HORIZONTAL; c.anchor = GridBagConstraints.NORTHWEST; c.weightx = 0; c.insets = new Insets(2, 2, 2, 2); myLocationField = new TextFieldWithBrowseButton(); myLocationField.setText(myProjectDirectory.toString()); final FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createSingleFolderDescriptor(); myLocationField.addBrowseFolderListener("Select base directory", "Select base directory for the Project", null, descriptor); final JLabel locationLabel = new JLabel("Location:"); c.gridx = 0; c.gridy = 0; 
panel.add(locationLabel, c); c.gridx = 1; c.gridy = 0; c.weightx = 1.; panel.add(myLocationField, c); final JLabel interpreterLabel = new JLabel("Interpreter:", SwingConstants.LEFT) { @Override public Dimension getMinimumSize() { return new JLabel("Project name:").getPreferredSize(); } @Override public Dimension getPreferredSize() { return getMinimumSize(); } }; c.gridx = 0; c.gridy = 1; c.weightx = 0; panel.add(interpreterLabel, c); final Project project = ProjectManager.getInstance().getDefaultProject(); final List<Sdk> sdks = PyConfigurableInterpreterList.getInstance(project).getAllPythonSdks(); VirtualEnvProjectFilter.removeAllAssociated(sdks); final Sdk preferred = sdks.isEmpty() ? null : sdks.iterator().next(); mySdkCombo = new PythonSdkChooserCombo(project, sdks, new Condition<Sdk>() { @Override public boolean value(Sdk sdk) { return sdk == preferred; } }); mySdkCombo.setButtonIcon(PythonIcons.Python.InterpreterGear); c.gridx = 1; c.gridy = 1; c.weightx = 1.; panel.add(mySdkCombo, c); myFrameworkCheckbox = new JBCheckBox("Install <framework>"); c.gridx = 0; c.gridy = 2; c.gridwidth = 2; c.weightx = 0.0; panel.add(myFrameworkCheckbox, c); myFrameworkCheckbox.setVisible(false); registerValidators(); return panel; } protected void registerValidators() { myFrameworkCheckbox.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { myInstallFrameworkChanged = true; checkValid(); } }); myLocationField.getTextField().getDocument().addDocumentListener(new DocumentAdapter() { @Override protected void textChanged(DocumentEvent e) { checkValid(); } }); final ActionListener listener = new ActionListener() { @Override public void actionPerformed(ActionEvent e) { checkValid(); } }; mySdkCombo.getComboBox().addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent event) { checkValid(); } }); myLocationField.getTextField().addActionListener(listener); 
mySdkCombo.getComboBox().addActionListener(listener); mySdkCombo.addActionListener(listener); } public boolean checkValid() { final String projectName = myLocationField.getText(); if (projectName.trim().isEmpty()) { setErrorText("Project name can't be empty"); return false; } if (myLocationField.getText().indexOf('$') >= 0) { setErrorText("Project directory name must not contain the $ character"); return false; } if (myProjectGenerator != null) { final String baseDirPath = myLocationField.getTextField().getText(); ValidationResult validationResult = myProjectGenerator.validate(baseDirPath); if (!validationResult.isOk()) { setErrorText(validationResult.getErrorMessage()); return false; } if (myProjectGenerator instanceof WebProjectTemplate) { final ValidationInfo validationInfo = ((WebProjectTemplate)myProjectGenerator).getPeer().validate(); if (validationInfo != null) { setErrorText(validationInfo.message); return false; } } } final Sdk sdk = getSdk(); setErrorText(null); myFrameworkCheckbox.setVisible(false); final boolean isPy3k = sdk != null && PythonSdkType.getLanguageLevelForSdk(sdk).isPy3K(); if (sdk != null && PythonSdkType.isRemote(sdk) && !acceptsRemoteSdk(myProjectGenerator)) { setErrorText("Please choose a local interpreter"); return false; } else if (myProjectGenerator instanceof PyFrameworkProjectGenerator) { PyFrameworkProjectGenerator frameworkProjectGenerator = (PyFrameworkProjectGenerator)myProjectGenerator; String frameworkName = frameworkProjectGenerator.getFrameworkTitle(); if (sdk != null && !isFrameworkInstalled(sdk)) { final PyPackageManagerImpl packageManager = (PyPackageManagerImpl)PyPackageManager.getInstance(sdk); final boolean onlyWithCache = PythonSdkFlavor.getFlavor(sdk) instanceof JythonSdkFlavor || PythonSdkFlavor.getFlavor(sdk) instanceof PyPySdkFlavor; try { if (onlyWithCache && packageManager.cacheIsNotNull() || !onlyWithCache) { final PyPackage pip = packageManager.findPackage("pip"); myFrameworkCheckbox.setText("Install " + 
frameworkName); myFrameworkCheckbox.setMnemonic(frameworkName.charAt(0)); myFrameworkCheckbox.setVisible(pip != null); if (!myInstallFrameworkChanged) { myFrameworkCheckbox.setSelected(pip != null); } } } catch (PyExternalProcessException e) { myFrameworkCheckbox.setVisible(false); } if (!myFrameworkCheckbox.isSelected()) { setErrorText("No " + frameworkName + " support installed in selected interpreter"); return false; } } if (isPy3k && !((PyFrameworkProjectGenerator)myProjectGenerator).supportsPython3()) { setErrorText(frameworkName + " is not supported for the selected interpreter"); return false; } } if (sdk == null) { setErrorText("No Python interpreter selected"); return false; } return true; } public void setErrorText(@Nullable String text) { myErrorLabel.setText(text); myCreateButton.setEnabled(text == null); } public void selectCompatiblePython() { DirectoryProjectGenerator generator = getProjectGenerator(); if (generator instanceof PyFrameworkProjectGenerator && !((PyFrameworkProjectGenerator)generator).supportsPython3()) { Sdk sdk = getSdk(); if (sdk != null && PythonSdkType.getLanguageLevelForSdk(sdk).isPy3K()) { Sdk python2Sdk = PythonSdkType.findPython2Sdk(null); if (python2Sdk != null) { mySdkCombo.getComboBox().setSelectedItem(python2Sdk); mySdkCombo.getComboBox().repaint(); } } } } private static boolean acceptsRemoteSdk(DirectoryProjectGenerator generator) { if (generator instanceof PyFrameworkProjectGenerator) { return ((PyFrameworkProjectGenerator)generator).acceptsRemoteSdk(); } return true; } private boolean isFrameworkInstalled(Sdk sdk) { PyFrameworkProjectGenerator projectGenerator = (PyFrameworkProjectGenerator)getProjectGenerator(); return projectGenerator != null && projectGenerator.isFrameworkInstalled(sdk); } @Nullable protected JPanel createAdvancedSettings() { return null; } public DirectoryProjectGenerator getProjectGenerator() { return myProjectGenerator; } private static class Button extends ActionButtonWithText { private static 
final Icon DEFAULT_ICON = PythonIcons.Python.Python; public Button(AnAction action, Presentation presentation) { super(action, wrapIcon(presentation), "NewProject", new Dimension(32, 32)); setBorder(new EmptyBorder(3, 3, 3, 3)); } @Override public String getToolTipText() { return null; } @Override protected int horizontalTextAlignment() { return SwingConstants.RIGHT; } @Override protected int iconTextSpace() { return 8; } private static Presentation wrapIcon(Presentation presentation) { Icon original = presentation.getIcon(); CenteredIcon centered = new CenteredIcon(original != null ? original : DEFAULT_ICON, 40, 40, false); presentation.setIcon(centered); return presentation; } } public Sdk getSdk() { return (Sdk)mySdkCombo.getComboBox().getSelectedItem(); } public String getProjectLocation() { return myLocationField.getText(); } public boolean installFramework() { return myFrameworkCheckbox.isSelected() && myFrameworkCheckbox.isVisible(); } }
python/ide/src/com/jetbrains/python/newProject/actions/AbstractProjectSettingsStep.java
package com.jetbrains.python.newProject.actions;

import com.intellij.facet.ui.ValidationResult;
import com.intellij.ide.impl.ProjectUtil;
import com.intellij.ide.util.projectWizard.WebProjectTemplate;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.actionSystem.impl.ActionButtonWithText;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.wm.impl.welcomeScreen.AbstractActionWithPanel;
import com.intellij.platform.DirectoryProjectGenerator;
import com.intellij.platform.WebProjectGenerator;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.util.NullableConsumer;
import com.intellij.util.ui.CenteredIcon;
import com.jetbrains.python.PythonSdkChooserCombo;
import com.jetbrains.python.configuration.PyConfigurableInterpreterList;
import com.jetbrains.python.configuration.VirtualEnvProjectFilter;
import com.jetbrains.python.newProject.PyFrameworkProjectGenerator;
import com.jetbrains.python.packaging.PyExternalProcessException;
import com.jetbrains.python.packaging.PyPackage;
import com.jetbrains.python.packaging.PyPackageManager;
import com.jetbrains.python.packaging.PyPackageManagerImpl;
import com.jetbrains.python.sdk.PythonSdkType;
import com.jetbrains.python.sdk.flavors.JythonSdkFlavor;
import com.jetbrains.python.sdk.flavors.PyPySdkFlavor;
import com.jetbrains.python.sdk.flavors.PythonSdkFlavor;
import icons.PythonIcons;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.util.List;

/**
 * Base "New Project" settings step: builds a panel with a project-location field,
 * a Python interpreter chooser, an optional "Install &lt;framework&gt;" checkbox and a
 * Create button, and validates the entered settings against the selected
 * {@link DirectoryProjectGenerator}. When validation passes and Create is pressed,
 * the supplied callback is invoked with this step.
 */
abstract public class AbstractProjectSettingsStep extends AbstractActionWithPanel implements DumbAware {
  protected final DirectoryProjectGenerator myProjectGenerator;
  // Invoked with this step when the Create action succeeds; may be null (then Create is a no-op).
  private final NullableConsumer<AbstractProjectSettingsStep> myCallback;
  private PythonSdkChooserCombo mySdkCombo;
  private JCheckBox myFrameworkCheckbox;
  // Set once the user manually toggles the framework checkbox, so checkValid() stops auto-selecting it.
  private boolean myInstallFrameworkChanged;
  private TextFieldWithBrowseButton myLocationField;
  // Pre-computed default location: first non-existent "untitled" dir under the base project dir.
  protected final File myProjectDirectory;
  private Button myCreateButton;
  private JLabel myErrorLabel;
  private AnAction myCreateAction;

  /**
   * @param projectGenerator generator describing the project type being created
   * @param callback         consumer run when the user presses Create and validation passes; may be null
   */
  public AbstractProjectSettingsStep(DirectoryProjectGenerator projectGenerator,
                                     NullableConsumer<AbstractProjectSettingsStep> callback) {
    super();
    myProjectGenerator = projectGenerator;
    myCallback = callback;
    myProjectDirectory = FileUtil.findSequentNonexistentFile(new File(ProjectUtil.getBaseDir()), "untitled", "");
    if (myProjectGenerator instanceof WebProjectTemplate) {
      // Clear the error message as soon as the generator-specific settings become valid.
      ((WebProjectTemplate)myProjectGenerator).getPeer().addSettingsStateListener(new WebProjectGenerator.SettingsStateListener() {
        @Override
        public void stateChanged(boolean validSettings) {
          if (validSettings) {
            setErrorText(null);
          }
        }
      });
    }
    myCreateAction = new AnAction("Create ", "Create Project", getIcon()) {
      @Override
      public void actionPerformed(AnActionEvent e) {
        boolean isOk;
        try {
          isOk = checkValid();
          if (myProjectGenerator instanceof WebProjectTemplate) {
            // Generator-specific validation may veto creation even when the common checks pass.
            final ValidationInfo validationInfo = ((WebProjectTemplate)myProjectGenerator).getPeer().validate();
            isOk = validationInfo == null;
            if (!isOk) {
              setErrorText(validationInfo.message);
            }
          }
        }
        catch (RuntimeException e1) {
          // Validation failures thrown as runtime exceptions are shown instead of propagated.
          isOk = false;
          setErrorText(e1.getMessage());
        }
        if (isOk && myCallback != null)
          myCallback.consume(AbstractProjectSettingsStep.this);
      }
    };
  }

  @Override
  public void actionPerformed(AnActionEvent e) {
    // Intentionally empty: the actual work happens in myCreateAction wired to the Create button.
  }

  /**
   * Builds the full step UI: base settings on top, optional advanced settings in the
   * middle, and an error label plus Create button at the bottom.
   */
  @Override
  public JPanel createPanel() {
    final JPanel mainPanel = new JPanel(new BorderLayout());
    mainPanel.setPreferredSize(new Dimension(mainPanel.getPreferredSize().width, 450));
    final JPanel panel = createBasePanel();
    mainPanel.add(panel, BorderLayout.NORTH);
    final JPanel advancedSettings = createAdvancedSettings();
    if (advancedSettings != null)
      mainPanel.add(advancedSettings, BorderLayout.CENTER);
    final JPanel bottomPanel = new JPanel(new BorderLayout());
    myCreateButton = new Button(myCreateAction, myCreateAction.getTemplatePresentation());
    bottomPanel.setPreferredSize(new Dimension(mainPanel.getPreferredSize().width, 40));
    myCreateButton.setPreferredSize(new Dimension(mainPanel.getPreferredSize().width, 40));
    myErrorLabel = new JLabel("");
    myErrorLabel.setForeground(JBColor.RED);
    bottomPanel.add(myErrorLabel, BorderLayout.WEST);
    bottomPanel.add(myCreateButton, BorderLayout.CENTER);
    mainPanel.add(bottomPanel, BorderLayout.SOUTH);
    return mainPanel;
  }

  /** Icon shown on the Create action; defaults to the generator's logo. */
  protected Icon getIcon() {
    return myProjectGenerator.getLogo();
  }

  /**
   * Builds the common settings grid: location chooser (row 0), interpreter combo (row 1)
   * and the initially hidden framework checkbox (row 2), then wires validation listeners.
   */
  private JPanel createBasePanel() {
    final JPanel panel = new JPanel(new GridBagLayout());
    final GridBagConstraints c = new GridBagConstraints();
    c.fill = GridBagConstraints.HORIZONTAL;
    c.anchor = GridBagConstraints.NORTHWEST;
    c.weightx = 0;
    c.insets = new Insets(2, 2, 2, 2);
    myLocationField = new TextFieldWithBrowseButton();
    myLocationField.setText(myProjectDirectory.toString());
    final FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createSingleFolderDescriptor();
    myLocationField.addBrowseFolderListener("Select base directory", "Select base directory for the Project", null, descriptor);
    final JLabel locationLabel = new JLabel("Location:");
    c.gridx = 0;
    c.gridy = 0;
    panel.add(locationLabel, c);
    c.gridx = 1;
    c.gridy = 0;
    c.weightx = 1.;
    panel.add(myLocationField, c);
    // Sized to match a "Project name:" label so columns line up across steps.
    final JLabel interpreterLabel = new JLabel("Interpreter:", SwingConstants.LEFT) {
      @Override
      public Dimension getMinimumSize() {
        return new JLabel("Project name:").getPreferredSize();
      }

      @Override
      public Dimension getPreferredSize() {
        return getMinimumSize();
      }
    };
    c.gridx = 0;
    c.gridy = 1;
    c.weightx = 0;
    panel.add(interpreterLabel, c);
    final Project project = ProjectManager.getInstance().getDefaultProject();
    final List<Sdk> sdks = PyConfigurableInterpreterList.getInstance(project).getAllPythonSdks();
    // Virtualenvs associated with other projects are not offered here.
    VirtualEnvProjectFilter.removeAllAssociated(sdks);
    final Sdk preferred = sdks.isEmpty() ? null : sdks.iterator().next();
    mySdkCombo = new PythonSdkChooserCombo(project, sdks, new Condition<Sdk>() {
      @Override
      public boolean value(Sdk sdk) {
        return sdk == preferred;
      }
    });
    mySdkCombo.setButtonIcon(PythonIcons.Python.InterpreterGear);
    c.gridx = 1;
    c.gridy = 1;
    c.weightx = 1.;
    panel.add(mySdkCombo, c);
    // Placeholder text; the real framework name is substituted in checkValid().
    myFrameworkCheckbox = new JBCheckBox("Install <framework>");
    c.gridx = 0;
    c.gridy = 2;
    c.gridwidth = 2;
    c.weightx = 0.0;
    panel.add(myFrameworkCheckbox, c);
    myFrameworkCheckbox.setVisible(false);
    registerValidators();
    return panel;
  }

  /** Re-runs {@link #checkValid()} whenever the location, interpreter or framework checkbox changes. */
  protected void registerValidators() {
    myFrameworkCheckbox.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        // Remember the manual toggle so validation stops overriding the user's choice.
        myInstallFrameworkChanged = true;
        checkValid();
      }
    });
    myLocationField.getTextField().getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(DocumentEvent e) {
        checkValid();
      }
    });
    final ActionListener listener = new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        checkValid();
      }
    };
    mySdkCombo.getComboBox().addPropertyChangeListener(new PropertyChangeListener() {
      @Override
      public void propertyChange(PropertyChangeEvent event) {
        checkValid();
      }
    });
    myLocationField.getTextField().addActionListener(listener);
    mySdkCombo.getComboBox().addActionListener(listener);
    mySdkCombo.addActionListener(listener);
  }

  /**
   * Validates the current settings: non-empty location without '$', generator-specific
   * validation, a selected interpreter compatible with the chosen framework (local where
   * required, Python 2 where Python 3 is unsupported), and framework availability.
   * Side effects: updates the error label / Create button state and the visibility,
   * text and selection of the framework checkbox.
   *
   * @return true when the project can be created with the current settings
   */
  public boolean checkValid() {
    final String projectName = myLocationField.getText();
    if (projectName.trim().isEmpty()) {
      setErrorText("Project name can't be empty");
      return false;
    }
    if (myLocationField.getText().indexOf('$') >= 0) {
      // '$' would be interpreted as a macro in path substitutions.
      setErrorText("Project directory name must not contain the $ character");
      return false;
    }
    if (myProjectGenerator != null) {
      final String baseDirPath = myLocationField.getTextField().getText();
      ValidationResult validationResult = myProjectGenerator.validate(baseDirPath);
      if (!validationResult.isOk()) {
        setErrorText(validationResult.getErrorMessage());
        return false;
      }
      if (myProjectGenerator instanceof WebProjectTemplate) {
        final ValidationInfo validationInfo = ((WebProjectTemplate)myProjectGenerator).getPeer().validate();
        if (validationInfo != null) {
          setErrorText(validationInfo.message);
          return false;
        }
      }
    }
    final Sdk sdk = getSdk();

    setErrorText(null);
    myFrameworkCheckbox.setVisible(false);
    final boolean isPy3k = sdk != null && PythonSdkType.getLanguageLevelForSdk(sdk).isPy3K();
    if (sdk != null && PythonSdkType.isRemote(sdk) && !acceptsRemoteSdk(myProjectGenerator)) {
      setErrorText("Please choose a local interpreter");
      return false;
    }
    else if (myProjectGenerator instanceof PyFrameworkProjectGenerator) {
      PyFrameworkProjectGenerator frameworkProjectGenerator = (PyFrameworkProjectGenerator)myProjectGenerator;
      String frameworkName = frameworkProjectGenerator.getFrameworkTitle();
      if (sdk != null && !isFrameworkInstalled(sdk)) {
        final PyPackageManagerImpl packageManager = (PyPackageManagerImpl)PyPackageManager.getInstance(sdk);
        // Jython/PyPy package listing is expensive, so only consult it when a cache exists.
        final boolean onlyWithCache = PythonSdkFlavor.getFlavor(sdk) instanceof JythonSdkFlavor ||
                                      PythonSdkFlavor.getFlavor(sdk) instanceof PyPySdkFlavor;
        try {
          if (onlyWithCache && packageManager.cacheIsNotNull() || !onlyWithCache) {
            // The "install framework" option is offered only when pip is available.
            final PyPackage pip = packageManager.findPackage("pip");
            myFrameworkCheckbox.setText("Install " + frameworkName);
            myFrameworkCheckbox.setMnemonic(frameworkName.charAt(0));
            myFrameworkCheckbox.setVisible(pip != null);
            if (!myInstallFrameworkChanged) {
              myFrameworkCheckbox.setSelected(pip != null);
            }
          }
        }
        catch (PyExternalProcessException e) {
          myFrameworkCheckbox.setVisible(false);
        }
        if (!myFrameworkCheckbox.isSelected()) {
          setErrorText("No " + frameworkName + " support installed in selected interpreter");
          return false;
        }
      }
      if (isPy3k && !((PyFrameworkProjectGenerator)myProjectGenerator).supportsPython3()) {
        setErrorText(frameworkName + " is not supported for the selected interpreter");
        return false;
      }
    }
    if (sdk == null) {
      setErrorText("No Python interpreter selected");
      return false;
    }
    return true;
  }

  /**
   * Shows the given error (or clears it when null) and enables the Create button
   * only while there is no error.
   */
  public void setErrorText(@Nullable String text) {
    myErrorLabel.setText(text);
    myCreateButton.setEnabled(text == null);
  }

  /**
   * If the chosen generator does not support Python 3 but a Python 3 SDK is selected,
   * switches the combo to an available Python 2 SDK.
   */
  public void selectCompatiblePython() {
    DirectoryProjectGenerator generator = getProjectGenerator();
    if (generator instanceof PyFrameworkProjectGenerator && !((PyFrameworkProjectGenerator)generator).supportsPython3()) {
      Sdk sdk = getSdk();
      if (sdk != null && PythonSdkType.getLanguageLevelForSdk(sdk).isPy3K()) {
        Sdk python2Sdk = PythonSdkType.findPython2Sdk(null);
        if (python2Sdk != null) {
          mySdkCombo.getComboBox().setSelectedItem(python2Sdk);
          mySdkCombo.getComboBox().repaint();
        }
      }
    }
  }

  /** Non-framework generators accept remote SDKs by default. */
  private static boolean acceptsRemoteSdk(DirectoryProjectGenerator generator) {
    if (generator instanceof PyFrameworkProjectGenerator) {
      return ((PyFrameworkProjectGenerator)generator).acceptsRemoteSdk();
    }
    return true;
  }

  private boolean isFrameworkInstalled(Sdk sdk) {
    PyFrameworkProjectGenerator projectGenerator = (PyFrameworkProjectGenerator)getProjectGenerator();
    return projectGenerator != null && projectGenerator.isFrameworkInstalled(sdk);
  }

  /** Override to contribute an extra settings panel below the base panel; null means none. */
  @Nullable
  protected JPanel createAdvancedSettings() {
    return null;
  }

  public DirectoryProjectGenerator getProjectGenerator() {
    return myProjectGenerator;
  }

  /** Large (32x32) Create button with right-aligned text, used at the bottom of the step. */
  private static class Button extends ActionButtonWithText {
    private static final Icon DEFAULT_ICON = PythonIcons.Python.Python;

    public Button(AnAction action, Presentation presentation) {
      super(action, wrapIcon(presentation), "NewProject", new Dimension(32, 32));
      setBorder(new EmptyBorder(3, 3, 3, 3));
    }

    @Override
    public String getToolTipText() {
      return null;
    }

    @Override
    protected int horizontalTextAlignment() {
      return SwingConstants.RIGHT;
    }

    @Override
    protected int iconTextSpace() {
      return 8;
    }

    // Centers the presentation icon (or the default Python icon) inside a 40x40 area.
    private static Presentation wrapIcon(Presentation presentation) {
      Icon original = presentation.getIcon();
      CenteredIcon centered = new CenteredIcon(original != null ? original : DEFAULT_ICON, 40, 40, false);
      presentation.setIcon(centered);
      return presentation;
    }
  }

  /** @return the interpreter currently selected in the combo, or null */
  public Sdk getSdk() {
    return (Sdk)mySdkCombo.getComboBox().getSelectedItem();
  }

  public String getProjectLocation() {
    return myLocationField.getText();
  }

  /** @return true when the user opted to install the framework (checkbox visible and checked) */
  public boolean installFramework() {
    return myFrameworkCheckbox.isSelected() && myFrameworkCheckbox.isVisible();
  }
}
check if settings are valid in generator-specific settings listeners
python/ide/src/com/jetbrains/python/newProject/actions/AbstractProjectSettingsStep.java
check if settings are valid in generator-specific settings listeners
Java
apache-2.0
53c6ca15e0b032197767f7b1c7bd1c4ac3bae164
0
tweise/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,tweise/apex-malhar,ananthc/apex-malhar,prasannapramod/apex-malhar,DataTorrent/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,prasannapramod/apex-malhar,chandnisingh/apex-malhar,siyuanh/incubator-apex-malhar,prasannapramod/apex-malhar,sandeep-n/incubator-apex-malhar,apache/incubator-apex-malhar,yogidevendra/apex-malhar,vrozov/apex-malhar,sandeep-n/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,chandnisingh/apex-malhar,vrozov/incubator-apex-malhar,ilganeli/incubator-apex-malhar,yogidevendra/apex-malhar,tweise/incubator-apex-malhar,skekre98/apex-mlhr,ananthc/apex-malhar,siyuanh/apex-malhar,chinmaykolhatkar/apex-malhar,apache/incubator-apex-malhar,skekre98/apex-mlhr,apache/incubator-apex-malhar,chandnisingh/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,ananthc/apex-malhar,vrozov/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,siyuanh/incubator-apex-malhar,tweise/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,skekre98/apex-mlhr,yogidevendra/incubator-apex-malhar,prasannapramod/apex-malhar,chinmaykolhatkar/apex-malhar,tweise/apex-malhar,vrozov/apex-malhar,PramodSSImmaneni/apex-malhar,siyuanh/incubator-apex-malhar,ananthc/apex-malhar,sandeep-n/incubator-apex-malhar,brightchen/apex-malhar,yogidevendra/apex-malhar,siyuanh/apex-malhar,yogidevendra/incubator-apex-malhar,yogidevendra/apex-malhar,ilganeli/incubator-apex-malhar,trusli/apex-malhar,tweise/apex-malhar,vrozov/apex-malhar,trusli/apex-malhar,skekre98/apex-mlhr,chandnisingh/apex-malhar,davidyan74/apex-malhar,tweise/apex-malhar,chandnisingh/apex-malhar,patilvikram/apex-malhar,apache/incubator-apex-malhar,ilganeli/incubator-apex-malhar,siyuanh/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,patilvikram/apex-malhar,yogidevendra/apex-malhar,sandeep-n/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,skekre98/apex-mlhr,PramodSSImmaneni/incubator-apex-m
alhar,PramodSSImmaneni/incubator-apex-malhar,trusli/apex-malhar,siyuanh/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,PramodSSImmaneni/apex-malhar,ilganeli/incubator-apex-malhar,vrozov/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,sandeep-n/incubator-apex-malhar,vrozov/apex-malhar,siyuanh/apex-malhar,prasannapramod/apex-malhar,prasannapramod/apex-malhar,PramodSSImmaneni/apex-malhar,siyuanh/apex-malhar,skekre98/apex-mlhr,tushargosavi/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,brightchen/apex-malhar,yogidevendra/incubator-apex-malhar,DataTorrent/Megh,chinmaykolhatkar/incubator-apex-malhar,patilvikram/apex-malhar,tushargosavi/incubator-apex-malhar,brightchen/apex-malhar,tushargosavi/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,brightchen/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,siyuanh/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,tweise/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,vrozov/apex-malhar,trusli/apex-malhar,tweise/incubator-apex-malhar,apache/incubator-apex-malhar,chandnisingh/apex-malhar,siyuanh/apex-malhar,vrozov/incubator-apex-malhar,vrozov/incubator-apex-malhar,ananthc/apex-malhar,siyuanh/incubator-apex-malhar,PramodSSImmaneni/apex-malhar,tweise/apex-malhar,ilganeli/incubator-apex-malhar,patilvikram/apex-malhar,PramodSSImmaneni/apex-malhar,trusli/apex-malhar,patilvikram/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,trusli/apex-malhar,tweise/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,patilvikram/apex-malhar,DataTorrent/incubator-apex-malhar,ilganeli/incubator-apex-malhar,chandnisingh/apex-malhar,DataTorrent/Megh,ananthc/apex-malhar,yogidevendra/apex-malhar,tushargosavi/incubator-apex-malhar,sandeep-n/incubator-apex-malhar,vrozov/incubator-apex-malhar,tweise/incubator-apex-malhar,patilvikram/apex-malhar,sandeep-n/incubator-apex-malhar,siyuanh/apex-malhar,davidyan74/
apex-malhar,brightchen/apex-malhar,davidyan74/apex-malhar,yogidevendra/incubator-apex-malhar,apache/incubator-apex-malhar,PramodSSImmaneni/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,vrozov/incubator-apex-malhar,davidyan74/apex-malhar,DataTorrent/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,davidyan74/apex-malhar,PramodSSImmaneni/apex-malhar,siyuanh/apex-malhar,apache/incubator-apex-malhar,brightchen/apex-malhar,vrozov/incubator-apex-malhar,trusli/apex-malhar,ilganeli/incubator-apex-malhar,tweise/incubator-apex-malhar,brightchen/apex-malhar,tushargosavi/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,davidyan74/apex-malhar
/*
 *  Copyright (c) 2012 Malhar, Inc.
 *  All Rights Reserved.
 */
package com.malhartech.lib.testbench;

import com.malhartech.api.BaseOperator;
import com.malhartech.api.Context.OperatorContext;
import com.malhartech.api.DefaultInputPort;
import com.malhartech.api.DefaultOutputPort;
import java.util.HashMap;
import java.util.Map;
import javax.validation.constraints.Min;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Expects incoming stream to be a HashMap&lt;K, Integer&gt; and adds all integer values to compute throughput. These
 * values are throughput per window from upstream operators. On end of window the total and average are emitted.<p>
 * <br>
 * <b>Tuple Schema</b>: each input tuple is HashMap&lt;K, Integer&gt;; the output tuple is a
 * HashMap&lt;String, Number&gt; carrying count, elapsed time, tuples/sec, rolling average and window id.<br>
 * <b>Port Interface</b><br>
 * <b>count</b>: Output port for emitting the results<br>
 * <b>data</b>: Input port for receiving the incoming tuple<br>
 * <br>
 * <b>Properties</b>: rolling_window_count: Number of windows to average over<br>
 * <b>Benchmarks</b>: Benchmarked at over 17 million tuples/second in local/in-line mode<br>
 *
 * @author amol
 */
public class ThroughputCounter<K> extends BaseOperator
{
  // Logger is immutable once assigned; keep it final.
  private static final Logger log = LoggerFactory.getLogger(ThroughputCounter.class);

  /**
   * Input port: each value in the incoming map is a per-key tuple count from upstream;
   * all values are summed into the running window total.
   */
  public final transient DefaultInputPort<HashMap<K, Integer>> data = new DefaultInputPort<HashMap<K, Integer>>(this)
  {
    @Override
    public void process(HashMap<K, Integer> tuple)
    {
      for (Map.Entry<K, Integer> e: tuple.entrySet()) {
        tuple_count += e.getValue().longValue();
      }
    }
  };

  /** Output port: emits one stats map per window (see OPORT_COUNT_TUPLE_* keys). */
  public final transient DefaultOutputPort<HashMap<String, Number>> count = new DefaultOutputPort<HashMap<String, Number>>(this);

  // Keys of the emitted stats map.
  public static final String OPORT_COUNT_TUPLE_AVERAGE = "avg";
  public static final String OPORT_COUNT_TUPLE_COUNT = "count";
  public static final String OPORT_COUNT_TUPLE_TIME = "window_time";
  public static final String OPORT_COUNT_TUPLE_TUPLES_PERSEC = "tuples_per_sec";
  public static final String OPORT_COUNT_TUPLE_WINDOWID = "window_id";

  // Wall-clock time (ms) at which the current measurement window started.
  private long windowStartTime = 0;
  // Number of windows the rolling average spans; 1 disables the ring buffers.
  @Min(1)
  private int rolling_window_count = 1;
  // Ring buffers of per-window tuple counts and elapsed times (allocated only when rolling_window_count > 1).
  long[] tuple_numbers = null;
  long[] time_numbers = null;
  // Next slot to overwrite once the ring buffers are full.
  int tuple_index = 0;
  // Number of windows recorded so far, until the buffers fill up.
  int count_denominator = 1;
  // Monotonically increasing id attached to each emitted stats map.
  long count_windowid = 0;
  long tuple_count = 1; // so that the first begin window starts the count down
  // Whether the previous endWindow() emitted stats (elapsed time was non-zero).
  boolean didemit = false;

  /** @return number of windows the throughput average is computed over (>= 1) */
  @Min(1)
  public int getRollingWindowCount()
  {
    return rolling_window_count;
  }

  public void setRollingWindowCount(int i)
  {
    rolling_window_count = i;
  }

  /**
   * Starts the clock and, when averaging over multiple windows, allocates the ring buffers.
   */
  @Override
  public void setup(OperatorContext context)
  {
    windowStartTime = System.currentTimeMillis();
    // Parameterized logging defers formatting until debug is enabled.
    log.debug("\nTupleCounter: set window to {}", rolling_window_count);
    if (rolling_window_count != 1) {
      // New arrays are zero-initialized by the JVM; no manual clearing needed.
      tuple_numbers = new long[rolling_window_count];
      time_numbers = new long[rolling_window_count];
      tuple_index = 0;
    }
  }

  @Override
  public void beginWindow(long windowId)
  {
    if (tuple_count != 0) { // Do not restart time if no tuples were sent
      windowStartTime = System.currentTimeMillis();
      if (didemit) {
        tuple_count = 0;
      }
    }
  }

  /**
   * Emits the per-window stats map unless no tuples arrived or no time elapsed
   * (in which case the window is folded into the next one).
   */
  @Override
  public void endWindow()
  {
    if (tuple_count == 0) {
      return;
    }
    long elapsedTime = System.currentTimeMillis() - windowStartTime;
    if (elapsedTime == 0) {
      // Too fast to measure; carry the count into the next window.
      didemit = false;
      return;
    }

    long average;
    long tuples_per_sec = (tuple_count * 1000) / elapsedTime; // * 1000 as elapsedTime is in millis
    if (rolling_window_count == 1) {
      average = tuples_per_sec;
    }
    else { // use the ring buffers
      long slots;
      if (count_denominator == rolling_window_count) {
        // Buffers are full: overwrite the oldest slot.
        tuple_numbers[tuple_index] = tuple_count;
        time_numbers[tuple_index] = elapsedTime;
        slots = rolling_window_count;
        tuple_index++;
        if (tuple_index == rolling_window_count) {
          tuple_index = 0;
        }
      }
      else {
        // Still filling up: append to the next free slot.
        tuple_numbers[count_denominator - 1] = tuple_count;
        time_numbers[count_denominator - 1] = elapsedTime;
        slots = count_denominator;
        count_denominator++;
      }
      long time_slot = 0;
      long numtuples = 0;
      for (int i = 0; i < slots; i++) {
        numtuples += tuple_numbers[i];
        time_slot += time_numbers[i];
      }
      // time_slot > 0 because every recorded elapsedTime is non-zero (checked above).
      average = (numtuples * 1000) / time_slot;
    }

    HashMap<String, Number> tuples = new HashMap<String, Number>();
    // Autoboxing replaces the deprecated new Long(long) constructor.
    tuples.put(OPORT_COUNT_TUPLE_AVERAGE, average);
    tuples.put(OPORT_COUNT_TUPLE_COUNT, tuple_count);
    tuples.put(OPORT_COUNT_TUPLE_TIME, elapsedTime);
    tuples.put(OPORT_COUNT_TUPLE_TUPLES_PERSEC, tuples_per_sec);
    tuples.put(OPORT_COUNT_TUPLE_WINDOWID, count_windowid++);
    count.emit(tuples);
    didemit = true;
  }
}
library/src/main/java/com/malhartech/lib/testbench/ThroughputCounter.java
/*
 *  Copyright (c) 2012 Malhar, Inc.
 *  All Rights Reserved.
 */
package com.malhartech.lib.testbench;

import com.malhartech.api.BaseOperator;
import com.malhartech.api.Context.OperatorContext;
import com.malhartech.api.DefaultInputPort;
import com.malhartech.api.DefaultOutputPort;
import java.util.HashMap;
import java.util.Map;
import javax.validation.constraints.Min;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Expects incoming stream to be a HashMap<String, Integer> and add all integer values to compute throughput. These
 * values are throughput per window from upstream operators. On end of window this total and average is emitted<p>
 * <br>
 * <br>
 * Benchmarks: This node has been benchmarked at over 5 million tuples/second in local/inline mode<br>
 *
 * <b>Tuple Schema</b>
 * Each input tuple is HashMap<String, Integer><br>
 * Output tuple is a HashMap<String, Integer>, where strings are throughputs, averages etc<br>
 * <b>Port Interface</b><br>
 * <b>count</b>: Output port for emitting the results<br>
 * <b>data</b>: Input port for receiving the incoming tuple<br>
 * <br>
 * <b>Properties</b>:
 * rolling_window_count: Number of windows to average over
 * <br>
 * Compile time checks are:<br>
 * none
 * <br>
 * <b>Benchmarks</b>: Blast as many tuples as possible in inline mode<br>
 * Benchmarked at over 17 million tuples/second in local/in-line mode<br>
 * <br>
 * @author amol
 */
public class ThroughputCounter<K> extends BaseOperator
{
  private static Logger log = LoggerFactory.getLogger(ThroughputCounter.class);

  // Input port: sums every value of the incoming map into the running window total.
  public final transient DefaultInputPort<HashMap<K, Integer>> data = new DefaultInputPort<HashMap<K, Integer>>(this)
  {
    @Override
    public void process(HashMap<K, Integer> tuple)
    {
      for (Map.Entry<K, Integer> e: tuple.entrySet()) {
        tuple_count += e.getValue().longValue();
      }
    }
  };

  // Output port: emits one stats map per window (keys below).
  public final transient DefaultOutputPort<HashMap<String,Number>> count = new DefaultOutputPort<HashMap<String, Number>>(this);

  // Keys of the emitted stats map.
  public static final String OPORT_COUNT_TUPLE_AVERAGE = "avg";
  public static final String OPORT_COUNT_TUPLE_COUNT = "count";
  public static final String OPORT_COUNT_TUPLE_TIME = "window_time";
  public static final String OPORT_COUNT_TUPLE_TUPLES_PERSEC = "tuples_per_sec";
  public static final String OPORT_COUNT_TUPLE_WINDOWID = "window_id";

  // Wall-clock time (ms) at which the current measurement window started.
  private long windowStartTime = 0;
  // Default span of the rolling average; 1 disables the ring buffers.
  private int rolling_window_count_default = 1;
  @Min(1)
  private int rolling_window_count = rolling_window_count_default;
  // Ring buffers of per-window tuple counts and elapsed times (allocated only when rolling_window_count > 1).
  long[] tuple_numbers = null;
  long[] time_numbers = null;
  // Next slot to overwrite once the ring buffers are full.
  int tuple_index = 0;
  // Number of windows recorded so far, until the buffers fill up.
  int count_denominator = 1;
  // Monotonically increasing id attached to each emitted stats map.
  long count_windowid = 0;
  long tuple_count = 1; // so that the first begin window starts the count down
  // Whether the previous endWindow() emitted stats (elapsed time was non-zero).
  boolean didemit = false;

  /** @return number of windows the throughput average is computed over (>= 1) */
  @Min(1)
  public int getRollingWindowCount()
  {
    return rolling_window_count;
  }

  public void setRollingWindowCount(int i)
  {
    rolling_window_count = i;
  }

  /** Starts the clock and, when averaging over multiple windows, allocates and clears the ring buffers. */
  @Override
  public void setup(OperatorContext context)
  {
    windowStartTime = System.currentTimeMillis();
    log.debug(String.format("\nTupleCounter: set window to %d", rolling_window_count));
    if (rolling_window_count != 1) { // Initialized the tuple_numbers
      tuple_numbers = new long[rolling_window_count];
      time_numbers = new long[rolling_window_count];
      for (int i = tuple_numbers.length; i > 0; i--) {
        tuple_numbers[i - 1] = 0;
        time_numbers[i - 1] = 0;
      }
      tuple_index = 0;
    }
  }

  @Override
  public void beginWindow(long windowId)
  {
    if (tuple_count != 0) { // Do not restart time if no tuples were sent
      windowStartTime = System.currentTimeMillis();
      if (didemit) {
        tuple_count = 0;
      }
    }
  }

  /**
   * convenient method for not sending more than configured number of windows.
   * Emits the per-window stats map unless no tuples arrived or no time elapsed
   * (in which case the window is folded into the next one).
   */
  @Override
  public void endWindow()
  {
    if (tuple_count == 0) {
      return;
    }
    long elapsedTime = System.currentTimeMillis() - windowStartTime;
    if (elapsedTime == 0) {
      // Too fast to measure; carry the count into the next window.
      didemit = false;
      return;
    }

    long average;
    long tuples_per_sec = (tuple_count * 1000) / elapsedTime; // * 1000 as elapsedTime is in millis
    if (rolling_window_count == 1) {
      average = tuples_per_sec;
    }
    else { // use tuple_numbers
      long slots;
      if (count_denominator == rolling_window_count) {
        // Buffers are full: overwrite the oldest slot.
        tuple_numbers[tuple_index] = tuple_count;
        time_numbers[tuple_index] = elapsedTime;
        slots = rolling_window_count;
        tuple_index++;
        if (tuple_index == rolling_window_count) {
          tuple_index = 0;
        }
      }
      else {
        // Still filling up: append to the next free slot.
        tuple_numbers[count_denominator - 1] = tuple_count;
        time_numbers[count_denominator - 1] = elapsedTime;
        slots = count_denominator;
        count_denominator++;
      }
      long time_slot = 0;
      long numtuples = 0;
      for (int i = 0; i < slots; i++) {
        numtuples += tuple_numbers[i];
        time_slot += time_numbers[i];
      }
      // time_slot > 0 because every recorded elapsedTime is non-zero (checked above).
      average = (numtuples * 1000) / time_slot;
    }

    HashMap<String, Number> tuples = new HashMap<String, Number>();
    tuples.put(OPORT_COUNT_TUPLE_AVERAGE, new Long(average));
    tuples.put(OPORT_COUNT_TUPLE_COUNT, new Long(tuple_count));
    tuples.put(OPORT_COUNT_TUPLE_TIME, new Long(elapsedTime));
    tuples.put(OPORT_COUNT_TUPLE_TUPLES_PERSEC, new Long(tuples_per_sec));
    tuples.put(OPORT_COUNT_TUPLE_WINDOWID, new Long(count_windowid++));
    count.emit(tuples);
    didemit = true;
  }
}
add throughput calculation to stram
library/src/main/java/com/malhartech/lib/testbench/ThroughputCounter.java
add throughput calculation to stram
Java
apache-2.0
ba84e78754c9723f51aabe3eba901d20728866ad
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.util.indexing; import com.google.common.annotations.VisibleForTesting; import com.intellij.AppTopics; import com.intellij.history.LocalHistory; import com.intellij.ide.plugins.PluginManager; import com.intellij.ide.startup.ServiceNotReadyException; import com.intellij.lang.ASTNode; import com.intellij.notification.NotificationDisplayType; import com.intellij.notification.NotificationGroup; import com.intellij.notification.NotificationType; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.ex.ActionUtil; import com.intellij.openapi.application.*; import com.intellij.openapi.application.impl.LaterInvocator; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.impl.EditorHighlighterCache; import com.intellij.openapi.extensions.impl.ExtensionPointImpl; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.FileDocumentManagerListener; import com.intellij.openapi.fileTypes.*; import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.impl.BackgroundableProcessIndicator; import com.intellij.openapi.progress.util.ProgressIndicatorUtils; import com.intellij.openapi.project.*; import com.intellij.openapi.roots.CollectingContentIterator; import com.intellij.openapi.roots.ContentIterator; import com.intellij.openapi.roots.impl.PushedFilePropertiesUpdaterImpl; import com.intellij.openapi.util.*; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import 
com.intellij.openapi.vfs.*; import com.intellij.openapi.vfs.newvfs.ManagingFS; import com.intellij.openapi.vfs.newvfs.NewVirtualFile; import com.intellij.openapi.vfs.newvfs.events.VFileEvent; import com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry; import com.intellij.openapi.vfs.newvfs.persistent.FlushingDaemon; import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.SingleRootFileViewProvider; import com.intellij.psi.impl.PsiDocumentTransactionListener; import com.intellij.psi.impl.PsiManagerImpl; import com.intellij.psi.impl.PsiTreeChangeEventImpl; import com.intellij.psi.impl.cache.impl.id.IdIndex; import com.intellij.psi.impl.cache.impl.id.PlatformIdTableBuilding; import com.intellij.psi.impl.source.PsiFileImpl; import com.intellij.psi.search.EverythingGlobalScope; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.stubs.SerializationManagerEx; import com.intellij.util.*; import com.intellij.util.concurrency.BoundedTaskExecutor; import com.intellij.util.concurrency.SequentialTaskExecutor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.IntObjectMap; import com.intellij.util.gist.GistManager; import com.intellij.util.indexing.hash.FileContentHashIndex; import com.intellij.util.indexing.hash.FileContentHashIndexExtension; import com.intellij.util.indexing.impl.InvertedIndexValueIterator; import com.intellij.util.indexing.provided.ProvidedIndexExtension; import com.intellij.util.indexing.provided.ProvidedIndexExtensionLocator; import com.intellij.util.io.DataOutputStream; import com.intellij.util.io.IOUtil; import com.intellij.util.io.storage.HeavyProcessLatch; import com.intellij.util.messages.MessageBus; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.UIUtil; import gnu.trove.THashMap; import gnu.trove.THashSet; 
import gnu.trove.TIntArrayList; import gnu.trove.TIntHashSet; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.io.*; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.nio.charset.Charset; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; /** * @author Eugene Zhuravlev */ public final class FileBasedIndexImpl extends FileBasedIndex implements Disposable { private static final ThreadLocal<VirtualFile> ourIndexedFile = new ThreadLocal<>(); static final Logger LOG = Logger.getInstance("#com.intellij.util.indexing.FileBasedIndexImpl"); private static final String CORRUPTION_MARKER_NAME = "corruption.marker"; private static final NotificationGroup NOTIFICATIONS = new NotificationGroup("Indexing", NotificationDisplayType.BALLOON, false); private final List<ID<?, ?>> myIndicesForDirectories = new SmartList<>(); private final Map<ID<?, ?>, DocumentUpdateTask> myUnsavedDataUpdateTasks = new ConcurrentHashMap<>(); private final Set<ID<?, ?>> myNotRequiringContentIndices = new THashSet<>(); private final Set<ID<?, ?>> myRequiringContentIndices = new THashSet<>(); private final Set<ID<?, ?>> myPsiDependentIndices = new THashSet<>(); private final Set<FileType> myNoLimitCheckTypes = new THashSet<>(); private volatile boolean myExtensionsRelatedDataWasLoaded; private final PerIndexDocumentVersionMap myLastIndexedDocStamps = new PerIndexDocumentVersionMap(); @NotNull private final ChangedFilesCollector 
myChangedFilesCollector; // NOTE: declaration continues from the previous line (type: ChangedFilesCollector)

// Registered sets of files eligible for indexing and the project each set belongs to.
private final List<IndexableFileSet> myIndexableSets = ContainerUtil.createLockFreeCopyOnWriteList();
private final Map<IndexableFileSet, Project> myIndexableSetToProjectMap = new THashMap<>();

private final MessageBusConnection myConnection;
private final FileDocumentManager myFileDocumentManager;
private final FileTypeManagerImpl myFileTypeManager;
// Indices already refreshed against unsaved/uncommitted documents; cleared whenever a
// write action starts or a PSI document transaction begins (see constructor listeners).
private final Set<ID<?, ?>> myUpToDateIndicesForUnsavedOrTransactedDocuments = ContainerUtil.newConcurrentSet();
// Documents currently inside a PSI transaction, mapped to their PSI file.
private volatile SmartFMap<Document, PsiFile> myTransactionMap = SmartFMap.emptyMap();

private final boolean myIsUnitTestMode;
@Nullable private ScheduledFuture<?> myFlushingFuture;
private final AtomicInteger myLocalModCount = new AtomicInteger();
private final AtomicInteger myFilesModCount = new AtomicInteger();
private final AtomicInteger myUpdatingFiles = new AtomicInteger();
private final Set<Project> myProjectsBeingUpdated = ContainerUtil.newConcurrentSet();
private final IndexAccessValidator myAccessValidator = new IndexAccessValidator();

private volatile boolean myInitialized;

// Asynchronous initialization of the index configuration; getState() blocks on this future.
private Future<IndexConfiguration> myStateFuture;
private volatile IndexConfiguration myState;
private volatile Future<?> myAllIndicesInitializedFuture;

/**
 * Returns the index configuration, blocking on the initialization future on first access.
 * Logs an error (rather than throwing) if called before initialization was started.
 */
private IndexConfiguration getState() {
  if (!myInitialized) {
    //throw new IndexNotReadyException();
    LOG.error("Unexpected initialization problem");
  }

  IndexConfiguration state = myState; // memory barrier
  if (state == null) {
    try {
      myState = state = myStateFuture.get();
    }
    catch (Throwable t) {
      throw new RuntimeException(t);
    }
  }
  return state;
}

/**
 * Wires up all message-bus subscriptions (PSI document transactions, file-type changes,
 * document sync events), registers the VFS change collector and kicks off asynchronous
 * index initialization via {@link #initComponent()}.
 */
public FileBasedIndexImpl(Application application,
                          VirtualFileManager vfManager,
                          FileDocumentManager fdm,
                          FileTypeManagerImpl fileTypeManager,
                          @NotNull MessageBus bus,
                          ManagingFS managingFS) {
  myFileDocumentManager = fdm;
  myFileTypeManager = fileTypeManager;
  myIsUnitTestMode = application.isUnitTestMode();

  final MessageBusConnection connection = bus.connect();

  // Track documents participating in PSI transactions; a started transaction also
  // invalidates the "up to date for unsaved docs" cache.
  connection.subscribe(PsiDocumentTransactionListener.TOPIC, new PsiDocumentTransactionListener() {
    @Override
    public void transactionStarted(@NotNull final Document doc, @NotNull final PsiFile file) {
      myTransactionMap = myTransactionMap.plus(doc, file);
      myUpToDateIndicesForUnsavedOrTransactedDocuments.clear();
    }

    @Override
    public void transactionCompleted(@NotNull final Document doc, @NotNull final PsiFile file) {
      myTransactionMap = myTransactionMap.minus(doc);
    }
  });

  // Detect removals of file types or of extension associations: both require a full
  // index rebuild, because previously indexed files may no longer be recognized.
  connection.subscribe(FileTypeManager.TOPIC, new FileTypeListener() {
    // Snapshot of type -> extensions taken before the change, compared after it.
    @Nullable private Map<FileType, Set<String>> myTypeToExtensionMap;

    @Override
    public void beforeFileTypesChanged(@NotNull final FileTypeEvent event) {
      cleanupProcessedFlag();
      myTypeToExtensionMap = new THashMap<>();
      for (FileType type : myFileTypeManager.getRegisteredFileTypes()) {
        myTypeToExtensionMap.put(type, getExtensions(type));
      }
    }

    @Override
    public void fileTypesChanged(@NotNull final FileTypeEvent event) {
      final Map<FileType, Set<String>> oldTypeToExtensionsMap = myTypeToExtensionMap;
      myTypeToExtensionMap = null;
      if (oldTypeToExtensionsMap != null) {
        final Map<FileType, Set<String>> newTypeToExtensionsMap = new THashMap<>();
        for (FileType type : myFileTypeManager.getRegisteredFileTypes()) {
          newTypeToExtensionsMap.put(type, getExtensions(type));
        }
        // we are interested only in extension changes or removals.
        // addition of an extension is handled separately by RootsChanged event
        if (!newTypeToExtensionsMap.keySet().containsAll(oldTypeToExtensionsMap.keySet())) {
          Set<FileType> removedFileTypes = new HashSet<>(oldTypeToExtensionsMap.keySet());
          removedFileTypes.removeAll(newTypeToExtensionsMap.keySet());
          rebuildAllIndices("The following file types were removed/are no longer associated: " + removedFileTypes);
          return;
        }
        for (Map.Entry<FileType, Set<String>> entry : oldTypeToExtensionsMap.entrySet()) {
          FileType fileType = entry.getKey();
          Set<String> strings = entry.getValue();
          if (!newTypeToExtensionsMap.get(fileType).containsAll(strings)) {
            Set<String> removedExtensions = new HashSet<>(strings);
            removedExtensions.removeAll(newTypeToExtensionsMap.get(fileType));
            rebuildAllIndices(fileType.getName() + " is no longer associated with extension(s) " + String.join(",", removedExtensions));
            return;
          }
        }
      }
    }

    // Collects the presentable strings of all name matchers associated with the type.
    @NotNull
    private Set<String> getExtensions(@NotNull FileType type) {
      final Set<String> set = new THashSet<>();
      for (FileNameMatcher matcher : myFileTypeManager.getAssociations(type)) {
        set.add(matcher.getPresentableString());
      }
      return set;
    }

    private void rebuildAllIndices(@NotNull String reason) {
      doClearIndices();
      scheduleIndexRebuild("File type change" + ", " + reason);
    }
  });

  // Drop in-memory index storage when document contents are reloaded from disk or
  // unsaved documents are discarded.
  connection.subscribe(AppTopics.FILE_DOCUMENT_SYNC, new FileDocumentManagerListener() {
    @Override
    public void fileContentReloaded(@NotNull VirtualFile file, @NotNull Document document) {
      cleanupMemoryStorage(true);
    }

    @Override
    public void unsavedDocumentsDropped() {
      cleanupMemoryStorage(false);
    }
  });

  // Any write action may change documents, so the unsaved-docs cache must be reset.
  application.addApplicationListener(new ApplicationListener() {
    @Override
    public void writeActionStarted(@NotNull Object action) {
      myUpToDateIndicesForUnsavedOrTransactedDocuments.clear();
    }
  }, this);

  myChangedFilesCollector = new ChangedFilesCollector(managingFS);
  myConnection = connection;

  vfManager.addAsyncFileListener(myChangedFilesCollector, this);

  initComponent();
}

@VisibleForTesting
void
doClearIndices() { // NOTE: "@VisibleForTesting void" modifier is on the previous line
  // Clears every registered index; per-index storage failures are logged and skipped
  // so one broken index does not prevent the others from being cleared.
  waitUntilIndicesAreInitialized();
  IndexingStamp.flushCaches();
  for (ID<?, ?> indexId : getState().getIndexIDs()) {
    try {
      clearIndex(indexId);
    }
    catch (StorageException e) {
      LOG.info(e);
    }
  }
}

/**
 * Runs {@code processor} over all files currently pending (re)indexing that belong to
 * {@code project}; returns {@code false} as soon as the processor does.
 */
boolean processChangedFiles(@NotNull Project project, @NotNull Processor<? super VirtualFile> processor) {
  // avoid missing files when events are processed concurrently
  return Stream.concat(myChangedFilesCollector.getEventMerger().getChangedFiles(),
                       myChangedFilesCollector.myFilesToUpdate.values().stream())
    .filter(filesToBeIndexedForProjectCondition(project))
    .distinct()
    .mapToInt(f -> processor.process(f) ? 1 : 0)
    .allMatch(success -> success == 1);
}

public static boolean isProjectOrWorkspaceFile(@NotNull VirtualFile file, @Nullable FileType fileType) {
  return ProjectCoreUtil.isProjectOrWorkspaceFile(file, fileType);
}

/**
 * A file belongs to the query scope when it is a valid {@link VirtualFileWithId} and
 * either matches the single restricted file or is accepted by the search-scope filter.
 */
static boolean belongsToScope(VirtualFile file, VirtualFile restrictedTo, GlobalSearchScope filter) {
  if (!(file instanceof VirtualFileWithId) || !file.isValid()) {
    return false;
  }

  return (restrictedTo == null || Comparing.equal(file, restrictedTo)) &&
         (filter == null || restrictedTo != null || filter.accept(file));
}

@Override
public void requestReindex(@NotNull final VirtualFile file) {
  GistManager.getInstance().invalidateData();
  // todo: this is the same vfs event handling sequence that is produced after events of FileContentUtilCore.reparseFiles
  // but it is more costly than current code, see IDEA-192192
  //myChangedFilesCollector.invalidateIndicesRecursively(file, false);
  //myChangedFilesCollector.buildIndicesForFileRecursively(file, false);
  myChangedFilesCollector.invalidateIndicesRecursively(file, true, myChangedFilesCollector.getEventMerger());
  if (myInitialized) myChangedFilesCollector.ensureUpToDateAsync();
}

// Submits index-data initialization; optionally blocks when async init is disabled.
private void initComponent() {
  myStateFuture = IndexInfrastructure.submitGenesisTask(new FileIndexDataInitialization());

  if (!IndexInfrastructure.ourDoAsyncIndicesInitialization) {
    waitUntilIndicesAreInitialized();
  }
}

private void waitUntilIndicesAreInitialized() {
  try {
    myStateFuture.get();
  }
  catch (Throwable t) {
    LOG.error(t);
  }
}

/**
 * @return true if registered index requires full rebuild for some reason, e.g. is just created or corrupted
 */
private static <K, V> boolean registerIndexer(@NotNull final FileBasedIndexExtension<K, V> extension,
                                              @NotNull IndexConfiguration state) throws IOException {
  final ID<K, V> name = extension.getName();
  final int version = extension.getVersion();

  final File versionFile = IndexInfrastructure.getVersionFile(name);

  boolean versionChanged = false;
  if (IndexingStamp.versionDiffers(name, version)) {
    final boolean versionFileExisted = versionFile.exists();

    if (versionFileExisted) {
      versionChanged = true;
      LOG.info("Version has changed for index " + name + ". The index will be rebuilt.");
    }
    else {
      LOG.debug("Index " + name + " will be built.");
    }
    if (extension.hasSnapshotMapping() && versionChanged) {
      // snapshot mapping data is version-specific, so it is dropped together with the index
      FileUtil.deleteWithRenaming(IndexInfrastructure.getPersistentIndexRootDir(name));
    }
    File rootDir = IndexInfrastructure.getIndexRootDir(name);
    if (versionFileExisted) FileUtil.deleteWithRenaming(rootDir);
    IndexingStamp.rewriteVersion(name, version);
  }

  initIndexStorage(extension, version, state);

  return versionChanged;
}

/**
 * Creates the on-disk storage for one index extension and registers the resulting index
 * in {@code state}. On failure the storage is deleted and a second (last) attempt is made.
 */
private static <K, V> void initIndexStorage(@NotNull FileBasedIndexExtension<K, V> extension,
                                            int version,
                                            @NotNull IndexConfiguration state) throws IOException {
  VfsAwareMapIndexStorage<K, V> storage = null;
  final ID<K, V> name = extension.getName();
  boolean contentHashesEnumeratorOk = false;

  for (int attempt = 0; attempt < 2; attempt++) {
    try {
      if (extension.hasSnapshotMapping()) {
        ContentHashesSupport.initContentHashesEnumerator();
        contentHashesEnumeratorOk = true;
      }
      storage = new VfsAwareMapIndexStorage<>(
        IndexInfrastructure.getStorageFile(name),
        extension.getKeyDescriptor(),
        extension.getValueExternalizer(),
        extension.getCacheSize(),
        extension.keyIsUniqueForIndexedFile(),
        extension.traceKeyHashToVirtualFileMapping()
      );

      final InputFilter
inputFilter = extension.getInputFilter(); // NOTE: declaration continues from the previous line
      // When the filter is file-type specific, collect the set of types it declares so
      // the configuration can index by file type; otherwise leave it null.
      final Set<FileType> addedTypes;
      if (inputFilter instanceof FileBasedIndex.FileTypeSpecificInputFilter) {
        addedTypes = new THashSet<>();
        ((FileBasedIndex.FileTypeSpecificInputFilter)inputFilter).registerFileTypesUsedForIndexing(type -> {
          if (type != null) addedTypes.add(type);
        });
      }
      else {
        addedTypes = null;
      }

      UpdatableIndex<K, V, FileContent> index = createIndex(extension, new MemoryIndexStorage<>(storage, name));

      // Wrap with a pre-built ("provided") index when one is available for this extension.
      ProvidedIndexExtension<K, V> providedExtension = ProvidedIndexExtensionLocator.findProvidedIndexExtensionFor(extension);
      if (providedExtension != null) {
        index = ProvidedIndexExtension.wrapWithProvidedIndex(providedExtension, extension, index);
      }

      state.registerIndex(name,
                          index,
                          file -> file instanceof VirtualFileWithId && inputFilter.acceptInput(file) &&
                                  !GlobalIndexFilter.isExcludedFromIndexViaFilters(file, name),
                          version + GlobalIndexFilter.getFiltersVersion(name),
                          addedTypes);
      break;
    }
    catch (Exception e) {
      // First attempt failed: wipe the (possibly corrupted) storage and retry once.
      LOG.info(e);
      boolean instantiatedStorage = storage != null;
      try {
        if (storage != null) storage.close();
        storage = null;
      }
      catch (Exception ignored) {
      }

      FileUtil.deleteWithRenaming(IndexInfrastructure.getIndexRootDir(name));

      if (extension.hasSnapshotMapping() && (!contentHashesEnumeratorOk || instantiatedStorage)) {
        FileUtil.deleteWithRenaming(IndexInfrastructure.getPersistentIndexRootDir(name)); // todo there is possibility of corruption of storage and content hashes
      }
      IndexingStamp.rewriteVersion(name, version);
    }
  }
}

/**
 * Persists the set of currently registered index ids into the "registered" file and
 * deletes the storage directories of indices that were present before but are no longer
 * registered.
 */
private static void saveRegisteredIndicesAndDropUnregisteredOnes(@NotNull Collection<? extends ID<?, ?>> ids) {
  if (ApplicationManager.getApplication().isDisposed() || !IndexInfrastructure.hasIndices()) {
    return;
  }
  final File registeredIndicesFile = new File(PathManager.getIndexRoot(), "registered");
  final Set<String> indicesToDrop = new THashSet<>();
  try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(registeredIndicesFile)))) {
    final int size = in.readInt();
    for (int idx = 0; idx < size; idx++) {
      indicesToDrop.add(IOUtil.readString(in));
    }
  }
  catch (IOException ignored) {
    // a missing or unreadable "registered" file simply means nothing to drop
  }
  for (ID<?, ?> key : ids) {
    indicesToDrop.remove(key.getName());
  }
  if (!indicesToDrop.isEmpty()) {
    LOG.info("Dropping indices:" + StringUtil.join(indicesToDrop, ","));
    for (String s : indicesToDrop) {
      FileUtil.deleteWithRenaming(IndexInfrastructure.getIndexRootDir(ID.create(s)));
    }
  }

  FileUtil.createIfDoesntExist(registeredIndicesFile);
  try (DataOutputStream os = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(registeredIndicesFile)))) {
    os.writeInt(ids.size());
    for (ID<?, ?> id : ids) {
      IOUtil.writeString(id.getName(), os);
    }
  }
  catch (IOException ignored) {
  }
}

/**
 * Builds the index implementation for an extension: a custom implementation when the
 * extension provides one, otherwise the default map/reduce index.
 */
@NotNull
private static <K, V> UpdatableIndex<K, V, FileContent> createIndex(@NotNull final FileBasedIndexExtension<K, V> extension,
                                                                    @NotNull final MemoryIndexStorage<K, V> storage)
  throws StorageException, IOException {
  return extension instanceof CustomImplementationFileBasedIndexExtension
         ?
((CustomImplementationFileBasedIndexExtension<K, V>)extension).createIndexImplementation(extension, storage)
         : new VfsAwareMapReduceIndex<>(extension, storage); // NOTE: ternary continues from the previous line
}

@Override
public void dispose() {
  performShutdown();
}

private final AtomicBoolean myShutdownPerformed = new AtomicBoolean(false);

/**
 * Idempotent shutdown: cancels the flushing task, purges data of invalid files still
 * pending update, flushes stamps, then clears (if a rebuild was scheduled) and disposes
 * every index before disconnecting from the message bus.
 */
private void performShutdown() {
  if (!myShutdownPerformed.compareAndSet(false, true)) {
    return; // already shut down
  }

  waitUntilAllIndicesAreInitialized();
  try {
    if (myFlushingFuture != null) {
      myFlushingFuture.cancel(false);
      myFlushingFuture = null;
    }
  }
  finally {
    LOG.info("START INDEX SHUTDOWN");
    try {
      PersistentIndicesConfiguration.saveConfiguration();

      for (VirtualFile file : myChangedFilesCollector.getAllFilesToUpdate()) {
        if (!file.isValid()) {
          removeDataFromIndicesForFile(Math.abs(getIdMaskingNonIdBasedFile(file)), file);
        }
      }
      IndexingStamp.flushCaches();

      IndexConfiguration state = getState();
      for (ID<?, ?> indexId : state.getIndexIDs()) {
        try {
          final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
          assert index != null;
          if (!RebuildStatus.isOk(indexId)) {
            index.clear(); // if the index was scheduled for rebuild, only clean it
          }
          index.dispose();
        }
        catch (Throwable throwable) {
          LOG.info("Problem disposing " + indexId, throwable);
        }
      }

      ContentHashesSupport.flushContentHashes();
      SharedIndicesData.flushData();
      myConnection.disconnect();
    }
    catch (Throwable e) {
      LOG.error("Problems during index shutdown", e);
    }
    LOG.info("END INDEX SHUTDOWN");
  }
}

private void waitUntilAllIndicesAreInitialized() {
  try {
    waitUntilIndicesAreInitialized();
    myAllIndicesInitializedFuture.get();
  }
  catch (Throwable ignore) {}
}

// Removes all indexed data recorded for the file id; for a DeletedVirtualFileStub the
// original file is used when clearing document-related transient data.
private void removeDataFromIndicesForFile(int fileId, VirtualFile file) {
  VirtualFile originalFile = file instanceof DeletedVirtualFileStub ? ((DeletedVirtualFileStub)file).getOriginalFile() : file;
  final List<ID<?, ?>> states = IndexingStamp.getNontrivialFileIndexedStates(fileId);

  if (!states.isEmpty()) {
    ProgressManager.getInstance().executeNonCancelableSection(() -> removeFileDataFromIndices(states, fileId, originalFile));
  }
}

/**
 * Drops persisted and transient data for {@code inputId} from each affected index.
 * The first unexpected failure is remembered and rethrown as an error after all
 * indices have been processed, so one failure does not leave the rest stale.
 */
private void removeFileDataFromIndices(@NotNull Collection<? extends ID<?, ?>> affectedIndices, int inputId, VirtualFile file) {
  // document diff can depend on previous value that will be removed
  removeTransientFileDataFromIndices(affectedIndices, inputId, file);

  Throwable unexpectedError = null;
  for (ID<?, ?> indexId : affectedIndices) {
    try {
      updateSingleIndex(indexId, null, inputId, null);
    }
    catch (ProcessCanceledException pce) {
      LOG.error(pce);
    }
    catch (Throwable e) {
      LOG.info(e);
      if (unexpectedError == null) {
        unexpectedError = e;
      }
    }
  }
  IndexingStamp.flushCache(inputId);
  if (unexpectedError != null) {
    LOG.error(unexpectedError);
  }
}

// Clears in-memory (transient) per-file data and any cached document stamps for the file.
private void removeTransientFileDataFromIndices(Collection<? extends ID<?, ?>> indices, int inputId, VirtualFile file) {
  for (ID<?, ?> indexId : indices) {
    final UpdatableIndex index = myState.getIndex(indexId);
    assert index != null;
    index.removeTransientDataForFile(inputId);
  }

  Document document = myFileDocumentManager.getCachedDocument(file);
  if (document != null) {
    myLastIndexedDocStamps.clearForDocument(document);
    document.putUserData(ourFileContentKey, null);
  }

  if (!myUpToDateIndicesForUnsavedOrTransactedDocuments.isEmpty()) {
    myUpToDateIndicesForUnsavedOrTransactedDocuments.clear();
  }
}

/**
 * Flushes all index storages to disk, bailing out early when a heavy process is running
 * or when {@code modCount} no longer matches the current local modification counter.
 */
private void flushAllIndices(final long modCount) {
  if (HeavyProcessLatch.INSTANCE.isRunning()) {
    return;
  }
  IndexingStamp.flushCaches();
  IndexConfiguration state = getState();
  for (ID<?, ?> indexId : new ArrayList<>(state.getIndexIDs())) {
    if (HeavyProcessLatch.INSTANCE.isRunning() || modCount != myLocalModCount.get()) {
      return; // do not interfere with 'main' jobs
    }
    try {
      final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
      if (index !=
null) { // NOTE: condition continues from the previous line ("if (index != null)")
        index.flush();
      }
    }
    catch (Throwable e) {
      requestRebuild(indexId, e);
    }
  }

  ContentHashesSupport.flushContentHashes();
  SharedIndicesData.flushData();
}

@Override
@NotNull
public <K> Collection<K> getAllKeys(@NotNull final ID<K, ?> indexId, @NotNull Project project) {
  Set<K> allKeys = new THashSet<>();
  processAllKeys(indexId, Processors.cancelableCollectProcessor(allKeys), project);
  return allKeys;
}

@Override
public <K> boolean processAllKeys(@NotNull final ID<K, ?> indexId, @NotNull Processor<? super K> processor, @Nullable Project project) {
  // null project means "everything"; otherwise restrict to the project's all-scope
  return processAllKeys(indexId, processor, project == null ? new EverythingGlobalScope() : GlobalSearchScope.allScope(project), null);
}

/**
 * Runs {@code processor} over every key of the index after ensuring it is up to date.
 * Storage-level failures schedule an index rebuild and make the method return false.
 */
@Override
public <K> boolean processAllKeys(@NotNull ID<K, ?> indexId,
                                  @NotNull Processor<? super K> processor,
                                  @NotNull GlobalSearchScope scope,
                                  @Nullable IdFilter idFilter) {
  try {
    waitUntilIndicesAreInitialized();
    final UpdatableIndex<K, ?, FileContent> index = getIndex(indexId);
    if (index == null) {
      return true;
    }
    ensureUpToDate(indexId, scope.getProject(), scope);
    return index.processAllKeys(processor, scope, idFilter);
  }
  catch (StorageException e) {
    scheduleRebuild(indexId, e);
  }
  catch (RuntimeException e) {
    final Throwable cause = e.getCause();
    if (cause instanceof StorageException || cause instanceof IOException) {
      scheduleRebuild(indexId, cause);
    }
    else {
      throw e;
    }
  }

  return false;
}

@NotNull
@Override
public <K, V> Map<K, V> getFileData(@NotNull ID<K, V> id, @NotNull VirtualFile virtualFile, @NotNull Project project) {
  int fileId = getFileId(virtualFile);
  Map<K, V> map = processExceptions(id, virtualFile, GlobalSearchScope.fileScope(project, virtualFile), index -> index.getIndexedFileData(fileId));
  return ContainerUtil.notNullize(map);
}

// Per-thread nesting counter for the "skip up-to-date check" mode; null/0 means enabled.
private static final ThreadLocal<Integer> myUpToDateCheckState = new ThreadLocal<>();

/**
 * Runs {@code runnable} with the index up-to-date check disabled for the current thread;
 * the disable counter is nested, so re-entrant calls are safe.
 */
public static <T,E extends Throwable> T disableUpToDateCheckIn(@NotNull ThrowableComputable<T, E> runnable) throws E {
  disableUpToDateCheckForCurrentThread();
  try {
    return runnable.compute();
  }
  finally {
    enableUpToDateCheckForCurrentThread();
  }
}

private static void disableUpToDateCheckForCurrentThread() {
  final Integer currentValue = myUpToDateCheckState.get();
  myUpToDateCheckState.set(currentValue == null ? 1 : currentValue.intValue() + 1);
}

private static void enableUpToDateCheckForCurrentThread() {
  final Integer currentValue = myUpToDateCheckState.get();
  if (currentValue != null) {
    final int newValue = currentValue.intValue() - 1;
    if (newValue != 0) {
      myUpToDateCheckState.set(newValue);
    }
    else {
      // remove the ThreadLocal entry entirely when the nesting count reaches zero
      myUpToDateCheckState.remove();
    }
  }
}

private static boolean isUpToDateCheckEnabled() {
  final Integer value = myUpToDateCheckState.get();
  return value == null || value.intValue() == 0;
}

// Guards against re-entrant ensureUpToDate() calls on the same thread.
private final ThreadLocal<Boolean> myReentrancyGuard = ThreadLocal.withInitial(() -> Boolean.FALSE);

/**
 * DO NOT CALL DIRECTLY IN CLIENT CODE
 * The method is internal to indexing engine and is called internally. The method is public due to implementation details
 */
@Override
public <K> void ensureUpToDate(@NotNull final ID<K, ?> indexId, @Nullable Project project, @Nullable GlobalSearchScope filter) {
  waitUntilIndicesAreInitialized();
  ensureUpToDate(indexId, project, filter, null);
}

/**
 * Brings the given index up to date for the scope (optionally restricted to one file):
 * processes pending VFS changes, handles dumb mode, then forces updates and indexes
 * unsaved documents. Skipped for indices that do not need file content and for the
 * empty scope; guarded against re-entrancy via {@link #myReentrancyGuard}.
 */
protected <K> void ensureUpToDate(@NotNull final ID<K, ?> indexId,
                                  @Nullable Project project,
                                  @Nullable GlobalSearchScope filter,
                                  @Nullable VirtualFile restrictedFile) {
  ProgressManager.checkCanceled();
  myChangedFilesCollector.ensureUpToDate();
  ApplicationManager.getApplication().assertReadAccessAllowed();

  if (!needsFileContentLoading(indexId)) {
    return; //indexed eagerly in foreground while building unindexed file list
  }
  if (filter == GlobalSearchScope.EMPTY_SCOPE) {
    return;
  }
  if (ActionUtil.isDumbMode(project)) {
    handleDumbMode(project);
  }

  NoAccessDuringPsiEvents.checkCallContext();

  if (myReentrancyGuard.get().booleanValue()) {
    //assert false : "ensureUpToDate() is not reentrant!";
    return;
  }
  myReentrancyGuard.set(Boolean.TRUE);

  try {
    if
(isUpToDateCheckEnabled()) { try { if (!RebuildStatus.isOk(indexId)) { throw new ServiceNotReadyException(); } forceUpdate(project, filter, restrictedFile); indexUnsavedDocuments(indexId, project, filter, restrictedFile); } catch (RuntimeException e) { final Throwable cause = e.getCause(); if (cause instanceof StorageException || cause instanceof IOException) { scheduleRebuild(indexId, e); } else { throw e; } } } } finally { myReentrancyGuard.set(Boolean.FALSE); } } private static void handleDumbMode(@Nullable Project project) { ProgressManager.checkCanceled(); // DumbModeAction.CANCEL if (project != null) { final ProgressIndicator progressIndicator = ProgressManager.getInstance().getProgressIndicator(); if (progressIndicator instanceof BackgroundableProcessIndicator) { final BackgroundableProcessIndicator indicator = (BackgroundableProcessIndicator)progressIndicator; if (indicator.getDumbModeAction() == DumbModeAction.WAIT) { assert !ApplicationManager.getApplication().isDispatchThread(); DumbService.getInstance(project).waitForSmartMode(); return; } } } throw IndexNotReadyException.create(project == null ? null : DumbServiceImpl.getInstance(project).getDumbModeStartTrace()); } @Override @NotNull public <K, V> List<V> getValues(@NotNull final ID<K, V> indexId, @NotNull K dataKey, @NotNull final GlobalSearchScope filter) { VirtualFile restrictToFile = null; if (filter instanceof Iterable) { // optimisation: in case of one-file-scope we can do better. 
// check if the scope knows how to extract some files off itself
    //noinspection unchecked
    Iterator<VirtualFile> virtualFileIterator = ((Iterable<VirtualFile>)filter).iterator();
    if (virtualFileIterator.hasNext()) {
      VirtualFile restrictToFileCandidate = virtualFileIterator.next();
      // exactly one file in the scope -> query just that file
      if (!virtualFileIterator.hasNext()) {
        restrictToFile = restrictToFileCandidate;
      }
    }
  }

  final List<V> values = new SmartList<>();
  ValueProcessor<V> processor = (file, value) -> {
    values.add(value);
    return true;
  };
  if (restrictToFile != null) {
    processValuesInOneFile(indexId, dataKey, restrictToFile, processor, filter);
  }
  else {
    processValuesInScope(indexId, dataKey, true, filter, null, processor);
  }
  return values;
}

// Returns the set of files in scope that have the given key in the given index.
@Override
@NotNull
public <K, V> Collection<VirtualFile> getContainingFiles(@NotNull final ID<K, V> indexId,
                                                         @NotNull K dataKey,
                                                         @NotNull final GlobalSearchScope filter) {
  final Set<VirtualFile> files = new THashSet<>();
  processValuesInScope(indexId, dataKey, false, filter, null, (file, value) -> {
    files.add(file);
    return true;
  });
  return files;
}

@Override
public <K, V> boolean processValues(@NotNull final ID<K, V> indexId, @NotNull final K dataKey, @Nullable final VirtualFile inFile,
                                    @NotNull ValueProcessor<? super V> processor, @NotNull final GlobalSearchScope filter) {
  return processValues(indexId, dataKey, inFile, processor, filter, null);
}

// Feeds (file, value) pairs for dataKey to the processor; a non-null inFile restricts the query to that single file.
@Override
public <K, V> boolean processValues(@NotNull ID<K, V> indexId,
                                    @NotNull K dataKey,
                                    @Nullable VirtualFile inFile,
                                    @NotNull ValueProcessor<? super V> processor,
                                    @NotNull GlobalSearchScope filter,
                                    @Nullable IdFilter idFilter) {
  return inFile != null
         ? processValuesInOneFile(indexId, dataKey, inFile, processor, filter)
         : processValuesInScope(indexId, dataKey, false, filter, idFilter, processor);
}

// Returns the index's modification stamp after ensuring it is up to date for the whole project.
@Override
public <K, V> long getIndexModificationStamp(@NotNull ID<K, V> indexId, @NotNull Project project) {
  UpdatableIndex<K, V, FileContent> index = getState().getIndex(indexId);
  ensureUpToDate(indexId, project, GlobalSearchScope.allScope(project));
  return index.getModificationStamp();
}

@FunctionalInterface
public interface IdValueProcessor<V> {
  /**
   * @param fileId the id of the file that the value came from
   * @param value a value to process
   * @return false if no further processing is needed, true otherwise
   */
  boolean process(int fileId, V value);
}

/**
 * Process values for a given index key together with their containing file ids. Note that project is supplied
 * only to ensure that all the indices in that project are up to date; there's no guarantee that the processed file ids belong
 * to this project.
 */
public <K, V> boolean processAllValues(@NotNull ID<K, V> indexId,
                                       @NotNull K key,
                                       @NotNull Project project,
                                       @NotNull IdValueProcessor<? super V> processor) {
  return processValueIterator(indexId, key, null, GlobalSearchScope.allScope(project), valueIt -> {
    while (valueIt.hasNext()) {
      V value = valueIt.next();
      for (ValueContainer.IntIterator inputIdsIterator = valueIt.getInputIdsIterator(); inputIdsIterator.hasNext(); ) {
        if (!processor.process(inputIdsIterator.next(), value)) {
          return false;
        }
        ProgressManager.checkCanceled();
      }
    }
    return true;
  });
}

// Runs an index query under the index read lock; StorageException (or a runtime exception whose
// cause indicates storage corruption) schedules an index rebuild and yields null.
@Nullable
private <K, V, R> R processExceptions(@NotNull final ID<K, V> indexId,
                                      @Nullable final VirtualFile restrictToFile,
                                      @NotNull final GlobalSearchScope filter,
                                      @NotNull ThrowableConvertor<? super UpdatableIndex<K, V, FileContent>, ? extends R, ?
extends StorageException> computable) {
  try {
    waitUntilIndicesAreInitialized();
    final UpdatableIndex<K, V, FileContent> index = getIndex(indexId);
    if (index == null) {
      return null;
    }
    final Project project = filter.getProject();
    //assert project != null : "GlobalSearchScope#getProject() should be not-null for all index queries";
    ensureUpToDate(indexId, project, filter, restrictToFile);
    // query runs under the index read lock, wrapped by the access validator
    return myAccessValidator.validate(indexId, () -> ConcurrencyUtil.withLock(index.getReadLock(), () -> computable.convert(index)));
  }
  catch (StorageException e) {
    scheduleRebuild(indexId, e);
  }
  catch (RuntimeException e) {
    final Throwable cause = getCauseToRebuildIndex(e);
    if (cause != null) {
      scheduleRebuild(indexId, cause);
    }
    else {
      throw e;
    }
  }
  return null;
}

// Processes values of dataKey that are associated with exactly the given file.
private <K, V> boolean processValuesInOneFile(@NotNull ID<K, V> indexId,
                                              @NotNull K dataKey,
                                              @NotNull VirtualFile restrictToFile,
                                              @NotNull ValueProcessor<? super V> processor, @NotNull GlobalSearchScope scope) {
  // files without an id cannot be present in the index
  if (!(restrictToFile instanceof VirtualFileWithId)) return true;

  int restrictedFileId = getFileId(restrictToFile);
  return processValueIterator(indexId, dataKey, restrictToFile, scope, valueIt -> {
    while (valueIt.hasNext()) {
      V value = valueIt.next();
      if (valueIt.getValueAssociationPredicate().contains(restrictedFileId) && !processor.process(restrictToFile, value)) {
        return false;
      }
      ProgressManager.checkCanceled();
    }
    return true;
  });
}

// Processes (file, value) pairs for dataKey across a scope; when ensureValueProcessedOnce is set,
// each distinct value is reported for at most one file.
private <K, V> boolean processValuesInScope(@NotNull ID<K, V> indexId,
                                            @NotNull K dataKey,
                                            boolean ensureValueProcessedOnce,
                                            @NotNull GlobalSearchScope scope,
                                            @Nullable IdFilter idFilter,
                                            @NotNull ValueProcessor<? super V> processor) {
  PersistentFS fs = (PersistentFS)ManagingFS.getInstance();
  // fall back to the project's indexable-files filter when no explicit id filter is given
  IdFilter filter = idFilter != null
                    ? idFilter : projectIndexableFiles(scope.getProject());

  return processValueIterator(indexId, dataKey, null, scope, valueIt -> {
    while (valueIt.hasNext()) {
      final V value = valueIt.next();
      for (final ValueContainer.IntIterator inputIdsIterator = valueIt.getInputIdsIterator(); inputIdsIterator.hasNext(); ) {
        final int id = inputIdsIterator.next();
        if (filter != null && !filter.containsFileId(id)) continue;
        VirtualFile file = IndexInfrastructure.findFileByIdIfCached(fs, id);
        if (file != null && scope.accept(file)) {
          if (!processor.process(file, value)) {
            return false;
          }
          if (ensureValueProcessedOnce) {
            ProgressManager.checkCanceled();
            break; // continue with the next value
          }
        }
        ProgressManager.checkCanceled();
      }
    }
    return true;
  });
}

// Adapts a raw value-iterator callback to the exception-translating query path; a null result
// (query aborted due to storage trouble) is treated as success.
private <K, V> boolean processValueIterator(@NotNull ID<K, V> indexId,
                                            @NotNull K dataKey,
                                            @Nullable VirtualFile restrictToFile,
                                            @NotNull GlobalSearchScope scope,
                                            @NotNull Processor<? super InvertedIndexValueIterator<V>> valueProcessor) {
  final Boolean result = processExceptions(indexId, restrictToFile, scope,
                                           index -> valueProcessor.process((InvertedIndexValueIterator<V>)index.getData(dataKey).getValueIterator()));
  return result == null || result.booleanValue();
}

// Processes files in scope that contain ALL the given keys (values optionally checked by valueChecker).
@Override
public <K, V> boolean processFilesContainingAllKeys(@NotNull final ID<K, V> indexId,
                                                    @NotNull final Collection<? extends K> dataKeys,
                                                    @NotNull final GlobalSearchScope filter,
                                                    @Nullable Condition<? super V> valueChecker,
                                                    @NotNull final Processor<?
super VirtualFile> processor) {
  ProjectIndexableFilesFilter filesSet = projectIndexableFiles(filter.getProject());
  final TIntHashSet set = collectFileIdsContainingAllKeys(indexId, dataKeys, filter, valueChecker, filesSet);
  return set != null && processVirtualFiles(set, filter, processor);
}

// Per-project cache key for the indexable-files filter (soft reference, invalidated by mod count).
private static final Key<SoftReference<ProjectIndexableFilesFilter>> ourProjectFilesSetKey = Key.create("projectFiles");

void filesUpdateEnumerationFinished() {
}

// Resets per-test transient state: pending file updates, transaction map, and each index.
@TestOnly
public void cleanupForNextTest() {
  myChangedFilesCollector.ensureUpToDate();

  myTransactionMap = SmartFMap.emptyMap();
  IndexConfiguration state = getState();
  for (ID<?, ?> indexId : state.getIndexIDs()) {
    final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
    assert index != null;
    index.cleanupForNextTest();
  }
}

@TestOnly
public IndexedFilesListener getChangedFilesCollector() {
  return myChangedFilesCollector;
}

/**
 * Compact membership filter over project-indexable file ids: a bitmask covering the
 * [myMinId, myMaxId] range, 64 ids per long (SHIFT = 6). Snapshot is tagged with the
 * files-mod-count it was built at, so stale filters can be detected and rebuilt.
 */
public static final class ProjectIndexableFilesFilter extends IdFilter {
  private static final int SHIFT = 6;
  private static final int MASK = (1 << SHIFT) - 1;
  private final long[] myBitMask;
  private final int myModificationCount;
  private final int myMinId;
  private final int myMaxId;

  private ProjectIndexableFilesFilter(@NotNull TIntArrayList set, int modificationCount) {
    myModificationCount = modificationCount;
    final int[] minMax = new int[2];
    if (!set.isEmpty()) {
      minMax[0] = minMax[1] = set.get(0);
    }
    // first pass: find the id range so the bitmask can be offset by myMinId
    set.forEach(value -> {
      minMax[0] = Math.min(minMax[0], value);
      minMax[1] = Math.max(minMax[1], value);
      return true;
    });
    myMaxId = minMax[1];
    myMinId = minMax[0];
    myBitMask = new long[((myMaxId - myMinId) >> SHIFT) + 1];
    // second pass: set one bit per file id
    set.forEach(value -> {
      value -= myMinId;
      myBitMask[value >> SHIFT] |= (1L << (value & MASK));
      return true;
    });
  }

  @Override
  public boolean containsFileId(int id) {
    if (id < myMinId) return false;
    if (id > myMaxId) return false;
    id -= myMinId;
    return (myBitMask[id >> SHIFT] & (1L << (id & MASK))) != 0;
  }
}

// Marks a project as being bulk-updated; bumps the mod count so cached filters are invalidated.
void filesUpdateStarted(Project project) {
  myChangedFilesCollector.ensureUpToDate();
  myProjectsBeingUpdated.add(project);
  myFilesModCount.incrementAndGet();
}

void filesUpdateFinished(@NotNull Project project) {
  myProjectsBeingUpdated.remove(project);
  myFilesModCount.incrementAndGet();
}

private final Lock myCalcIndexableFilesLock = new ReentrantLock();

/**
 * Returns the cached (or freshly computed) indexable-files filter for the project, or null when
 * filtering is not possible right now (default project, an update in progress, or the lock is busy —
 * computation is best-effort via tryLock, so queries never block on it).
 */
@Nullable
public ProjectIndexableFilesFilter projectIndexableFiles(@Nullable Project project) {
  if (project == null || project.isDefault() || myUpdatingFiles.get() > 0) return null;
  if (myProjectsBeingUpdated.contains(project)) return null;

  SoftReference<ProjectIndexableFilesFilter> reference = project.getUserData(ourProjectFilesSetKey);
  ProjectIndexableFilesFilter data = com.intellij.reference.SoftReference.dereference(reference);
  int currentFileModCount = myFilesModCount.get();
  if (data != null && data.myModificationCount == currentFileModCount) return data;

  if (myCalcIndexableFilesLock.tryLock()) { // make best effort for calculating filter
    try {
      // re-check under the lock: another thread may have just built it
      reference = project.getUserData(ourProjectFilesSetKey);
      data = com.intellij.reference.SoftReference.dereference(reference);
      if (data != null && data.myModificationCount == currentFileModCount) {
        return data;
      }

      long start = System.currentTimeMillis();

      final TIntArrayList filesSet = new TIntArrayList();
      iterateIndexableFiles(fileOrDir -> {
        ProgressManager.checkCanceled();
        if (fileOrDir instanceof VirtualFileWithId) {
          filesSet.add(((VirtualFileWithId)fileOrDir).getId());
        }
        return true;
      }, project, SilentProgressIndicator.create());
      ProjectIndexableFilesFilter filter = new ProjectIndexableFilesFilter(filesSet, currentFileModCount);
      project.putUserData(ourProjectFilesSetKey, new SoftReference<>(filter));

      long finish = System.currentTimeMillis();
      LOG.debug(filesSet.size() + " files iterated in " + (finish - start) + " ms");

      return filter;
    }
    finally {
      myCalcIndexableFilesLock.unlock();
    }
  }
  return null; // ok, no filtering
}

// Collects the ids of files containing ALL the given keys (intersection over the inverted index).
@Nullable
private <K, V> TIntHashSet collectFileIdsContainingAllKeys(@NotNull final ID<K, V>
indexId,
                                                           @NotNull final Collection<? extends K> dataKeys,
                                                           @NotNull final GlobalSearchScope filter,
                                                           @Nullable final Condition<? super V> valueChecker,
                                                           @Nullable final ProjectIndexableFilesFilter projectFilesFilter) {
  ThrowableConvertor<UpdatableIndex<K, V, FileContent>, TIntHashSet, StorageException> convertor =
    index -> InvertedIndexUtil.collectInputIdsContainingAllKeys(index, dataKeys, __ -> {
      ProgressManager.checkCanceled();
      return true;
    }, valueChecker, projectFilesFilter == null ? null : projectFilesFilter::containsFileId);

  return processExceptions(indexId, null, filter, convertor);
}

// Resolves each file id to a cached VirtualFile and feeds scope-accepted files to the processor.
private static boolean processVirtualFiles(@NotNull TIntHashSet ids,
                                           @NotNull final GlobalSearchScope filter,
                                           @NotNull final Processor<? super VirtualFile> processor) {
  final PersistentFS fs = (PersistentFS)ManagingFS.getInstance();
  return ids.forEach(id -> {
    ProgressManager.checkCanceled();
    VirtualFile file = IndexInfrastructure.findFileByIdIfCached(fs, id);
    if (file != null && filter.accept(file)) {
      return processor.process(file);
    }
    return true;
  });
}

/**
 * Extracts from a runtime exception the cause that justifies an index rebuild,
 * or returns null when the exception should simply propagate / be ignored.
 */
@Nullable
public static Throwable getCauseToRebuildIndex(@NotNull RuntimeException e) {
  if (ApplicationManager.getApplication().isUnitTestMode()) {
    // avoid rebuilding index in tests since we do it synchronously in requestRebuild and we can have readAction at hand
    return null;
  }
  if (e instanceof ProcessCanceledException) return null;
  if (e instanceof IndexOutOfBoundsException) return e; // something wrong with direct byte buffer
  Throwable cause = e.getCause();
  if (cause instanceof StorageException || cause instanceof IOException ||
      cause instanceof IllegalArgumentException) return cause;
  return null;
}

@Override
public <K, V> boolean getFilesWithKey(@NotNull final ID<K, V> indexId,
                                      @NotNull final Set<? extends K> dataKeys,
                                      @NotNull Processor<? super VirtualFile> processor,
                                      @NotNull GlobalSearchScope filter) {
  return processFilesContainingAllKeys(indexId, dataKeys, filter, null, processor);
}

@Override
public <K> void scheduleRebuild(@NotNull final ID<K, ?> indexId, @NotNull final Throwable e) {
  requestRebuild(indexId, e);
}

// Queues a full unindexed-files update in every open project.
private static void scheduleIndexRebuild(String reason) {
  LOG.info("scheduleIndexRebuild, reason: " + reason);
  for (Project project : ProjectManager.getInstance().getOpenProjects()) {
    DumbService.getInstance(project).queueTask(new UnindexedFilesUpdater(project));
  }
}

// Clears indices whose RebuildStatus demands it; a failed clear escalates to a rebuild request.
void clearIndicesIfNecessary() {
  waitUntilIndicesAreInitialized();
  for (ID<?, ?> indexId : getState().getIndexIDs()) {
    try {
      RebuildStatus.clearIndexIfNecessary(indexId, getIndex(indexId)::clear);
    }
    catch (StorageException e) {
      requestRebuild(indexId);
      LOG.error(e);
    }
  }
}

// Bumps the on-disk version stamp first so a crash mid-clear still forces a rebuild, then clears.
private void clearIndex(@NotNull final ID<?, ?> indexId) throws StorageException {
  advanceIndexVersion(indexId);

  final UpdatableIndex<?, ?, FileContent> index = myState.getIndex(indexId);
  assert index != null : "Index with key " + indexId + " not found or not registered properly";
  index.clear();
}

private void advanceIndexVersion(ID<?, ?> indexId) {
  try {
    IndexingStamp.rewriteVersion(indexId, myState.getIndexVersion(indexId));
  }
  catch (IOException e) {
    LOG.error(e);
  }
}

// Snapshot of unsaved documents; avoids allocating a set for the common 0/1-element cases.
@NotNull
private Set<Document> getUnsavedDocuments() {
  Document[] documents = myFileDocumentManager.getUnsavedDocuments();
  if (documents.length == 0) return Collections.emptySet();
  if (documents.length == 1) return Collections.singleton(documents[0]);
  return new THashSet<>(Arrays.asList(documents));
}

@NotNull
private Set<Document> getTransactedDocuments() {
  return myTransactionMap.keySet();
}

// Re-indexes in-memory (unsaved / transacted / uncommitted) documents for the given index,
// so queries see editor content that has not reached disk yet.
private void indexUnsavedDocuments(@NotNull final ID<?, ?> indexId,
                                   @Nullable Project project,
                                   final GlobalSearchScope filter,
                                   final VirtualFile restrictedFile) {
  if (myUpToDateIndicesForUnsavedOrTransactedDocuments.contains(indexId)) {
    return; // no need to index unsaved docs // todo: check scope ?
}

  Collection<Document> documents = getUnsavedDocuments();
  boolean psiBasedIndex = myPsiDependentIndices.contains(indexId);
  if (psiBasedIndex) {
    // PSI-dependent indices must also see documents with open transactions and uncommitted changes
    Set<Document> transactedDocuments = getTransactedDocuments();
    if (documents.isEmpty()) {
      documents = transactedDocuments;
    }
    else if (!transactedDocuments.isEmpty()) {
      documents = new THashSet<>(documents);
      documents.addAll(transactedDocuments);
    }
    Document[] uncommittedDocuments = project != null ? PsiDocumentManager.getInstance(project).getUncommittedDocuments() : Document.EMPTY_ARRAY;
    if (uncommittedDocuments.length > 0) {
      List<Document> uncommittedDocumentsCollection = Arrays.asList(uncommittedDocuments);
      if (documents.isEmpty()) documents = uncommittedDocumentsCollection;
      else {
        if (!(documents instanceof THashSet)) documents = new THashSet<>(documents);

        documents.addAll(uncommittedDocumentsCollection);
      }
    }
  }

  if (!documents.isEmpty()) {
    Collection<Document> documentsToProcessForProject = ContainerUtil.filter(documents,
                                                                             document -> belongsToScope(myFileDocumentManager.getFile(document), restrictedFile, filter));

    if (!documentsToProcessForProject.isEmpty()) {
      DocumentUpdateTask task = myUnsavedDataUpdateTasks.get(indexId);
      assert task != null : "Task for unsaved data indexing was not initialized for index " + indexId;

      // mark the index as up to date only if every unsaved document was processed and no transactions remain
      if (runUpdate(true, () -> task.processAll(documentsToProcessForProject, project)) &&
          documentsToProcessForProject.size() == documents.size() &&
          !hasActiveTransactions()
      ) {
        ProgressManager.checkCanceled();
        myUpToDateIndicesForUnsavedOrTransactedDocuments.add(indexId);
      }
    }
  }
}

private boolean hasActiveTransactions() {
  return !myTransactionMap.isEmpty();
}

// Abstraction over "the text to index for a document": either the raw document
// (AuthenticContent) or the last committed PSI text (PsiContent).
private interface DocumentContent {
  @NotNull
  CharSequence getText();

  long getModificationStamp();
}

// Content taken directly from the document.
private static class AuthenticContent implements DocumentContent {
  private final Document myDocument;

  private AuthenticContent(final Document document) {
    myDocument = document;
  }

  @NotNull
  @Override
  public CharSequence getText() {
    return myDocument.getImmutableCharSequence();
  }

  @Override
  public long getModificationStamp() {
    return myDocument.getModificationStamp();
  }
}

// Content taken from the PSI file when it lags behind the document (uncommitted changes).
private static class PsiContent implements DocumentContent {
  private final Document myDocument;
  private final PsiFile myFile;

  private PsiContent(final Document document, final PsiFile file) {
    myDocument = document;
    myFile = file;
  }

  @NotNull
  @Override
  public CharSequence getText() {
    // PSI is behind the document: index the PSI tree's characters, not the newer document text
    if (myFile.getViewProvider().getModificationStamp() != myDocument.getModificationStamp()) {
      final ASTNode node = myFile.getNode();
      assert node != null;
      return node.getChars();
    }
    return myDocument.getImmutableCharSequence();
  }

  @Override
  public long getModificationStamp() {
    return myFile.getViewProvider().getModificationStamp();
  }
}

// Caches the FileContentImpl built for an unsaved document so several indices can reuse it.
private static final Key<WeakReference<FileContentImpl>> ourFileContentKey = Key.create("unsaved.document.index.content");

// returns false if doc was not indexed because it is already up to date
// return true if document was indexed
// caller is responsible to ensure no concurrent same document processing
private boolean indexUnsavedDocument(@NotNull final Document document, @NotNull final ID<?, ?> requestedIndexId, final Project project,
                                     @NotNull final VirtualFile vFile) {
  final PsiFile dominantContentFile = project == null ? null : findLatestKnownPsiForUncomittedDocument(document, project);

  final DocumentContent content;
  if (dominantContentFile != null && dominantContentFile.getViewProvider().getModificationStamp() != document.getModificationStamp()) {
    content = new PsiContent(document, dominantContentFile);
  }
  else {
    content = new AuthenticContent(document);
  }

  boolean psiBasedIndex = myPsiDependentIndices.contains(requestedIndexId);
  // PSI-dependent indices are keyed by the last committed stamp, others by the content stamp
  final long currentDocStamp = psiBasedIndex
                               ? PsiDocumentManager.getInstance(project).getLastCommittedStamp(document)
                               : content.getModificationStamp();

  final long previousDocStamp = myLastIndexedDocStamps.get(document, requestedIndexId);
  if (previousDocStamp == currentDocStamp) return false;

  final CharSequence contentText = content.getText();
  myFileTypeManager.freezeFileTypeTemporarilyIn(vFile, () -> {
    if (getAffectedIndexCandidates(vFile).contains(requestedIndexId) &&
        getInputFilter(requestedIndexId).acceptInput(vFile)) {
      final int inputId = Math.abs(getFileId(vFile));

      if (!isTooLarge(vFile, contentText.length())) {
        // Reasonably attempt to use same file content when calculating indices as we can evaluate them several at once and store in file content
        WeakReference<FileContentImpl> previousContentRef = document.getUserData(ourFileContentKey);
        FileContentImpl previousContent = com.intellij.reference.SoftReference.dereference(previousContentRef);
        final FileContentImpl newFc;
        if (previousContent != null && previousContent.getStamp() == currentDocStamp) {
          newFc = previousContent;
        }
        else {
          newFc = new FileContentImpl(vFile, contentText, currentDocStamp);
          if (IdIndex.ourSnapshotMappingsEnabled) {
            newFc.putUserData(UpdatableSnapshotInputMappingIndex.FORCE_IGNORE_MAPPING_INDEX_UPDATE, Boolean.TRUE);
          }
          document.putUserData(ourFileContentKey, new WeakReference<>(newFc));
        }

        initFileContent(newFc, project, dominantContentFile);
        newFc.ensureThreadSafeLighterAST();

        if (content instanceof AuthenticContent) {
          newFc.putUserData(PlatformIdTableBuilding.EDITOR_HIGHLIGHTER,
                            EditorHighlighterCache.getEditorHighlighterForCachesBuilding(document));
        }

        try {
          getIndex(requestedIndexId).update(inputId, newFc).compute();
        }
        finally {
          cleanFileContent(newFc, dominantContentFile);
        }
      }
      else { // effectively wipe the data from the indices
        getIndex(requestedIndexId).update(inputId, null).compute();
      }
    }

    long previousState = myLastIndexedDocStamps.set(document, requestedIndexId, currentDocStamp);
    assert previousState == previousDocStamp;
  });
return true;
}

// Guards switching between transient (in-memory) and persistent index storage modes.
private final StorageGuard myStorageLock = new StorageGuard();
private volatile boolean myPreviousDataBufferingState;
private final Object myBufferingStateUpdateLock = new Object();

// Runs the cleanup in both storage modes so transient and persistent data are both affected.
@ApiStatus.Experimental
public void runCleanupAction(@NotNull Runnable cleanupAction) {
  Computable<Boolean> updateComputable = () -> {
    cleanupAction.run();
    return true;
  };
  runUpdate(false, updateComputable);
  runUpdate(true, updateComputable);
}

/**
 * Executes an index update inside the storage guard, switching every index's buffering mode
 * (double-checked under myBufferingStateUpdateLock) when the requested mode differs from the last one.
 */
private boolean runUpdate(boolean transientInMemoryIndices, Computable<Boolean> update) {
  StorageGuard.StorageModeExitHandler storageModeExitHandler = myStorageLock.enter(transientInMemoryIndices);

  if (myPreviousDataBufferingState != transientInMemoryIndices) {
    synchronized (myBufferingStateUpdateLock) {
      if (myPreviousDataBufferingState != transientInMemoryIndices) {
        IndexConfiguration state = getState();
        for (ID<?, ?> indexId : state.getIndexIDs()) {
          final UpdatableIndex index = state.getIndex(indexId);
          assert index != null;
          index.setBufferingEnabled(transientInMemoryIndices);
        }
        myPreviousDataBufferingState = transientInMemoryIndices;
      }
    }
  }

  try {
    return update.compute();
  }
  finally {
    storageModeExitHandler.leave();
  }
}

// Drops in-memory index data (optionally keeping PSI-dependent indices) and document stamps.
private void cleanupMemoryStorage(boolean skipPsiBasedIndices) {
  myLastIndexedDocStamps.clear();
  IndexConfiguration state = myState;
  if (state == null) {
    // avoid waiting for end of indices initialization (IDEA-173382)
    // in memory content will appear on indexing (in read action) and here is event dispatch (write context)
    return;
  }
  for (ID<?, ?> indexId : state.getIndexIDs()) {
    if (skipPsiBasedIndices && myPsiDependentIndices.contains(indexId)) continue;
    final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
    assert index != null;
    index.cleanupMemoryStorage();
  }
}

// Requests a rebuild, deferring through the genesis task pool while extensions are still loading.
@Override
public void requestRebuild(@NotNull final ID<?, ?> indexId, final Throwable throwable) {
  if (!myExtensionsRelatedDataWasLoaded) {
    IndexInfrastructure.submitGenesisTask(() -> {
      waitUntilIndicesAreInitialized(); // should be always true here since the genesis pool is sequential
      doRequestRebuild(indexId, throwable);
      return null;
    });
  }
  else {
    doRequestRebuild(indexId, throwable);
  }
}

// Marks the index for rebuild, bumps its version and schedules the actual re-indexing
// (synchronously in unit tests, via a later transaction otherwise).
private void doRequestRebuild(@NotNull ID<?, ?> indexId, Throwable throwable) {
  cleanupProcessedFlag();
  if (!myExtensionsRelatedDataWasLoaded) reportUnexpectedAsyncInitState();

  if (RebuildStatus.requestRebuild(indexId)) {
    String message = "Rebuild requested for index " + indexId;
    Application app = ApplicationManager.getApplication();
    if (app.isUnitTestMode() && app.isReadAccessAllowed() && !app.isDispatchThread()) {
      // shouldn't happen in tests in general; so fail early with the exception that caused index to be rebuilt.
      // otherwise reindexing will fail anyway later, but with a much more cryptic assertion
      LOG.error(message, throwable);
    }
    else {
      LOG.info(message, throwable);
    }

    cleanupProcessedFlag();

    if (!myInitialized) return;
    advanceIndexVersion(indexId);

    Runnable rebuildRunnable = () -> scheduleIndexRebuild("checkRebuild");

    if (myIsUnitTestMode) {
      rebuildRunnable.run();
    }
    else {
      // we do invoke later since we can have read lock acquired
      TransactionGuard.getInstance().submitTransactionLater(app, rebuildRunnable);
    }
  }
}

private static void reportUnexpectedAsyncInitState() {
  LOG.error("Unexpected async indices initialization problem");
}

public <K, V> UpdatableIndex<K, V, FileContent> getIndex(ID<K, V> indexId) {
  return getState().getIndex(indexId);
}

private InputFilter getInputFilter(@NotNull ID<?, ?> indexId) {
  if (!myInitialized) {
    // 1. early vfs event that needs invalidation
    // 2. pushers that do synchronous indexing for contentless indices
    waitUntilIndicesAreInitialized();
  }

  return getState().getInputFilter(indexId);
}

// Files scheduled for update that belong to the given project's indexable sets.
@NotNull
Collection<VirtualFile> getFilesToUpdate(final Project project) {
  return ContainerUtil.filter(myChangedFilesCollector.getAllFilesToUpdate(), filesToBeIndexedForProjectCondition(project)::test);
}

// Accepts invalid files (so deletions are processed) and files in an indexable set of this project.
@NotNull
private Predicate<VirtualFile> filesToBeIndexedForProjectCondition(Project project) {
  return virtualFile -> {
    if (!virtualFile.isValid()) {
      return true;
    }

    for (IndexableFileSet set : myIndexableSets) {
      final Project proj = myIndexableSetToProjectMap.get(set);
      if (proj != null && !proj.equals(project)) {
        continue; // skip this set as associated with a different project
      }
      if (ReadAction.compute(() -> set.isInSet(virtualFile))) {
        return true;
      }
    }
    return false;
  };
}

public boolean isFileUpToDate(VirtualFile file) {
  return !myChangedFilesCollector.isScheduledForUpdate(file);
}

// caller is responsible to ensure no concurrent same document processing
void processRefreshedFile(@Nullable Project project, @NotNull final com.intellij.ide.caches.FileContent fileContent) {
  // ProcessCanceledException will cause re-adding the file to processing list
  final VirtualFile file = fileContent.getVirtualFile();
  if (myChangedFilesCollector.isScheduledForUpdate(file)) {
    indexFileContent(project, fileContent);
  }
}

// Indexes the given file content, refreshing it when the timestamp is stale, and finally
// clears the file from the scheduled-for-update queue.
public void indexFileContent(@Nullable Project project, @NotNull com.intellij.ide.caches.FileContent content) {
  VirtualFile file = content.getVirtualFile();
  final int fileId = Math.abs(getIdMaskingNonIdBasedFile(file));
  boolean setIndexedStatus = true;

  try {
    // if file was scheduled for update due to vfs events then it is present in myFilesToUpdate
    // in this case we consider that current indexing (out of roots backed CacheUpdater) will cover its content
    if (file.isValid() && content.getTimeStamp() != file.getTimeStamp()) {
      content = new com.intellij.ide.caches.FileContent(file);
    }

    if (!file.isValid() || isTooLarge(file)) {
removeDataFromIndicesForFile(fileId, file);
      // a deleted file that came back: index its original content instead
      if (file instanceof DeletedVirtualFileStub && ((DeletedVirtualFileStub)file).isResurrected()) {
        doIndexFileContent(project, new com.intellij.ide.caches.FileContent(((DeletedVirtualFileStub)file).getOriginalFile()));
      }
    }
    else {
      setIndexedStatus = doIndexFileContent(project, content);
    }
  }
  finally {
    IndexingStamp.flushCache(fileId);
  }

  myChangedFilesCollector.removeFileIdFromFilesScheduledForUpdate(fileId);
  if (file instanceof VirtualFileSystemEntry && setIndexedStatus) ((VirtualFileSystemEntry)file).setFileIndexed(true);
}

// Runs every applicable index over the file's content (the FileContentImpl is built lazily,
// once, and shared across indices); wipes stale data for indices no longer applicable.
// Returns false if any individual index update failed.
private boolean doIndexFileContent(@Nullable Project project, @NotNull final com.intellij.ide.caches.FileContent content) {
  final VirtualFile file = content.getVirtualFile();
  Ref<Boolean> setIndexedStatus = Ref.create(Boolean.TRUE);

  myFileTypeManager.freezeFileTypeTemporarilyIn(file, () -> {
    final FileType fileType = file.getFileType();
    final Project finalProject = project == null ? ProjectUtil.guessProjectForFile(file) : project;
    PsiFile psiFile = null;
    FileContentImpl fc = null;
    int inputId = Math.abs(getFileId(file));
    Set<ID<?, ?>> currentIndexedStates = new THashSet<>(IndexingStamp.getNontrivialFileIndexedStates(inputId));

    final List<ID<?, ?>> affectedIndexCandidates = getAffectedIndexCandidates(file);
    //noinspection ForLoopReplaceableByForEach
    for (int i = 0, size = affectedIndexCandidates.size(); i < size; ++i) {
      final ID<?, ?> indexId = affectedIndexCandidates.get(i);
      if (shouldIndexFile(file, indexId)) {
        if (fc == null) {
          // lazily build the shared file content on the first index that needs it
          byte[] currentBytes;
          try {
            currentBytes = content.getBytes();
          }
          catch (IOException e) {
            currentBytes = ArrayUtilRt.EMPTY_BYTE_ARRAY;
          }
          fc = new FileContentImpl(file, currentBytes);

          if (IdIndex.ourSnapshotMappingsEnabled) {
            FileType substituteFileType = SubstitutedFileType.substituteFileType(file, fileType, finalProject);
            byte[] hash = calculateHash(currentBytes, fc.getCharset(), fileType, substituteFileType);
            fc.setHash(hash);
          }

          psiFile = content.getUserData(IndexingDataKeys.PSI_FILE);
          initFileContent(fc, finalProject, psiFile);
        }

        try {
          ProgressManager.checkCanceled();
          if (!updateSingleIndex(indexId, file, inputId, fc)) {
            setIndexedStatus.set(Boolean.FALSE);
          }
          currentIndexedStates.remove(indexId);
        }
        catch (ProcessCanceledException e) {
          cleanFileContent(fc, psiFile);
          throw e;
        }
      }
    }

    if (psiFile != null) {
      psiFile.putUserData(PsiFileImpl.BUILDING_STUB, null);
    }

    // indices that previously had data for this file but were not re-run above: drop their data
    for (ID<?, ?> indexId : currentIndexedStates) {
      if (!getIndex(indexId).isIndexedStateForFile(inputId, file)) {
        ProgressManager.checkCanceled();
        if (!updateSingleIndex(indexId, file, inputId, null)) {
          setIndexedStatus.set(Boolean.FALSE);
        }
      }
    }
  });
  return setIndexedStatus.get();
}

// Content hash for snapshot mappings; binary files hash by substituted type, text files include charset.
@NotNull
public static byte[] calculateHash(@NotNull byte[] currentBytes,
                                   @NotNull Charset charset,
                                   @NotNull FileType fileType,
                                   @NotNull FileType substituteFileType) {
  return fileType.isBinary()
         ? ContentHashesSupport.calcContentHash(currentBytes, substituteFileType)
         : ContentHashesSupport.calcContentHashWithFileType(currentBytes, charset, substituteFileType);
}

public boolean isIndexingCandidate(@NotNull VirtualFile file, @NotNull ID<?, ?> indexId) {
  return !isTooLarge(file) && getAffectedIndexCandidates(file).contains(indexId);
}

// Indices that could apply to this file, based on its file type (empty for project/workspace files).
@NotNull
private List<ID<?, ?>> getAffectedIndexCandidates(@NotNull VirtualFile file) {
  if (file.isDirectory()) {
    return isProjectOrWorkspaceFile(file, null) ?
Collections.emptyList() : myIndicesForDirectories;
  }
  FileType fileType = file.getFileType();
  if (isProjectOrWorkspaceFile(file, fileType)) return Collections.emptyList();

  return getState().getFileTypesForIndex(fileType);
}

// Detaches the PSI file from the file content after indexing.
private static void cleanFileContent(@NotNull FileContentImpl fc, PsiFile psiFile) {
  if (psiFile != null) psiFile.putUserData(PsiFileImpl.BUILDING_STUB, null);
  fc.putUserData(IndexingDataKeys.PSI_FILE, null);
}

// Attaches project and (optionally) PSI file to the file content before indexing.
private static void initFileContent(@NotNull FileContentImpl fc, Project project, PsiFile psiFile) {
  if (psiFile != null) {
    psiFile.putUserData(PsiFileImpl.BUILDING_STUB, true);
    fc.putUserData(IndexingDataKeys.PSI_FILE, psiFile);
  }

  fc.putUserData(IndexingDataKeys.PROJECT, project);
}

/**
 * Updates one index for one file (currentFC == null wipes the file's data from the index).
 * Returns false when the index is scheduled for rebuild and the update was skipped.
 * A failing update escalates to an index rebuild when the failure looks like storage trouble.
 */
private boolean updateSingleIndex(@NotNull ID<?, ?> indexId, VirtualFile file, final int inputId, @Nullable FileContent currentFC) {
  if (!myExtensionsRelatedDataWasLoaded) reportUnexpectedAsyncInitState();
  if (!RebuildStatus.isOk(indexId) && !myIsUnitTestMode) {
    return false; // the index is scheduled for rebuild, no need to update
  }
  myLocalModCount.incrementAndGet();

  final UpdatableIndex<?, ?, FileContent> index = getIndex(indexId);
  assert index != null;

  boolean hasContent = currentFC != null;
  if (ourIndexedFile.get() != null) throw new AssertionError("Reentrant indexing");
  ourIndexedFile.set(file);
  boolean updateCalculated = false;
  try {
    // important: no hard referencing currentFC to avoid OOME, the methods introduced for this purpose!
    // important: update is called out of try since possible indexer extension is HANDLED as single file fail / restart indexing policy
    final Computable<Boolean> update = index.update(inputId, currentFC);
    updateCalculated = true;

    scheduleUpdate(indexId, update, file, inputId, hasContent);
  }
  catch (RuntimeException exception) {
    Throwable causeToRebuildIndex = getCauseToRebuildIndex(exception);
    if (causeToRebuildIndex != null && (updateCalculated || causeToRebuildIndex instanceof IOException)) {
      requestRebuild(indexId, exception);
      return false;
    }
    throw exception;
  }
  finally {
    ourIndexedFile.remove();
  }
  return true;
}

@Override
public VirtualFile getFileBeingCurrentlyIndexed() {
  return ourIndexedFile.get();
}

// Re-indexes a refreshed file when pulled from the files-to-update queue.
private class VirtualFileUpdateTask extends UpdateTask<VirtualFile> {
  @Override
  void doProcess(VirtualFile item, Project project) {
    processRefreshedFile(project, new com.intellij.ide.caches.FileContent(item));
  }
}

private final VirtualFileUpdateTask myForceUpdateTask = new VirtualFileUpdateTask();
private final AtomicInteger myForceUpdateRequests = new AtomicInteger();

// Synchronously indexes all scheduled files matching the project/scope/file restriction.
// Every 64th unrestricted request also picks up files from other projects.
private void forceUpdate(@Nullable Project project, @Nullable final GlobalSearchScope filter, @Nullable final VirtualFile restrictedTo) {
  Collection<VirtualFile> allFilesToUpdate = myChangedFilesCollector.getAllFilesToUpdate();

  if (!allFilesToUpdate.isEmpty()) {
    boolean includeFilesFromOtherProjects = restrictedTo == null && (myForceUpdateRequests.incrementAndGet() & 0x3F) == 0;
    List<VirtualFile> virtualFilesToBeUpdatedForProject = ContainerUtil.filter(
      allFilesToUpdate,
      new ProjectFilesCondition(projectIndexableFiles(project), filter, restrictedTo, includeFilesFromOtherProjects)
    );

    if (!virtualFilesToBeUpdatedForProject.isEmpty()) {
      myForceUpdateTask.processAll(virtualFilesToBeUpdatedForProject, project);
    }
  }
}

// Read side taken while applying single updates; write side presumably taken elsewhere — the
// writer is outside this chunk, verify against the rest of the class.
private final Lock myReadLock;
private final Lock myWriteLock;
{
  ReadWriteLock lock = new ReentrantReadWriteLock();
  myReadLock = lock.readLock();
  myWriteLock = lock.writeLock();
}

// Applies a computed index update and records the file's indexed/outdated state under the read lock.
private void scheduleUpdate(@NotNull final ID<?, ?> indexId, @NotNull Computable<Boolean> update, VirtualFile file, final int inputId, final boolean hasContent) {
  if (runUpdate(false, update)) {
    ConcurrencyUtil.withLock(myReadLock, () -> {
      UpdatableIndex<?, ?, FileContent> index = getIndex(indexId);
      if (hasContent) {
        index.setIndexedStateForFile(inputId, file);
      }
      else {
        index.resetIndexedStateForFile(inputId);
      }
    });
  }
}

private boolean needsFileContentLoading(@NotNull ID<?, ?> indexId) {
  return !myNotRequiringContentIndices.contains(indexId);
}

@Nullable
private IndexableFileSet getIndexableSetForFile(VirtualFile file) {
  for (IndexableFileSet set : myIndexableSets) {
    if (set.isInSet(file)) {
      return set;
    }
  }
  return null;
}

// Reacts to an in-memory (document) change of a file's content.
private void doTransientStateChangeForFile(int fileId, @NotNull VirtualFile file) {
  waitUntilIndicesAreInitialized();
  if (!clearUpToDateStateForPsiIndicesOfUnsavedDocuments(file, IndexingStamp.getNontrivialFileIndexedStates(fileId))) {
    // change in persistent file
    clearUpToDateStateForPsiIndicesOfVirtualFile(file);
  }
}

// Reacts to a persistent (VFS) change/removal: marks content-requiring indices outdated
// (lazy reindex) rather than updating them eagerly.
private void doInvalidateIndicesForFile(int fileId, @NotNull VirtualFile file, boolean contentChanged) {
  waitUntilIndicesAreInitialized();
  cleanProcessedFlag(file);

  List<ID<?, ?>> nontrivialFileIndexedStates = IndexingStamp.getNontrivialFileIndexedStates(fileId);
  Collection<ID<?, ?>> fileIndexedStatesToUpdate = ContainerUtil.intersection(nontrivialFileIndexedStates, myRequiringContentIndices);

  if (contentChanged) {
    // only mark the file as outdated, reindex will be done lazily
    if (!fileIndexedStatesToUpdate.isEmpty()) {
      //noinspection ForLoopReplaceableByForEach
      for (int i = 0, size = nontrivialFileIndexedStates.size(); i < size; ++i) {
        final ID<?, ?> indexId = nontrivialFileIndexedStates.get(i);
        if (needsFileContentLoading(indexId)) {
          getIndex(indexId).resetIndexedStateForFile(fileId);
        }
      }

      // transient index value can depend on disk value because former is diff to latter
      removeTransientFileDataFromIndices(nontrivialFileIndexedStates, fileId,
file);

      // the file is for sure not a dir and it was previously indexed by at least one index
      if (file.isValid()) {
        if(!isTooLarge(file)) myChangedFilesCollector.scheduleForUpdate(file);
        else myChangedFilesCollector.scheduleForUpdate(new DeletedVirtualFileStub((VirtualFileWithId)file));
      }
      else {
        LOG.info("Unexpected state in update:" + file);
      }
    }
  }
  else { // file was removed
    for (ID<?, ?> indexId : nontrivialFileIndexedStates) {
      if (myNotRequiringContentIndices.contains(indexId)) {
        updateSingleIndex(indexId, null, fileId, null);
      }
    }
    if(!fileIndexedStatesToUpdate.isEmpty()) {
      // its data should be (lazily) wiped for every index
      myChangedFilesCollector.scheduleForUpdate(new DeletedVirtualFileStub((VirtualFileWithId)file));
    }
    else {
      myChangedFilesCollector.removeScheduledFileFromUpdate(file); // no need to update it anymore
    }
  }
}

/**
 * Schedules indexing of a file after a VFS add/content-change event.
 * Content-less indices are updated eagerly here; content-requiring indices only have their
 * indexed stamps reset and the file queued for lazy re-indexing.
 */
private void scheduleFileForIndexing(int fileId, @NotNull VirtualFile file, boolean contentChange) {
  // handle 'content-less' indices separately
  boolean fileIsDirectory = file.isDirectory();

  if (!contentChange) {
    FileContent fileContent = null;
    for (ID<?, ?> indexId : fileIsDirectory ? myIndicesForDirectories : myNotRequiringContentIndices) {
      if (getInputFilter(indexId).acceptInput(file)) {
        if (fileContent == null) {
          fileContent = new FileContentImpl(file);  // created lazily, shared by all content-less indices
        }
        updateSingleIndex(indexId, file, fileId, fileContent);
      }
    }
  }

  // For 'normal indices' schedule the file for update and reset stamps for all affected indices (there
  // can be client that used indices between before and after events, in such case indices are up to date due to force update
  // with old content)
  if (!fileIsDirectory) {
    if (!file.isValid() || isTooLarge(file)) {
      // large file might be scheduled for update in before event when its size was not large
      myChangedFilesCollector.removeScheduledFileFromUpdate(file);
    }
    else {
      myFileTypeManager.freezeFileTypeTemporarilyIn(file, () -> {
        final List<ID<?, ?>> candidates = getAffectedIndexCandidates(file);

        boolean scheduleForUpdate = false;

        //noinspection ForLoopReplaceableByForEach
        for (int i = 0, size = candidates.size(); i < size; ++i) {
          final ID<?, ?> indexId = candidates.get(i);
          if (needsFileContentLoading(indexId) && getInputFilter(indexId).acceptInput(file)) {
            getIndex(indexId).resetIndexedStateForFile(fileId);
            scheduleForUpdate = true;
          }
        }

        if (scheduleForUpdate) {
          IndexingStamp.flushCache(fileId);
          myChangedFilesCollector.scheduleForUpdate(file);
        }
        else if (file instanceof VirtualFileSystemEntry) {
          // No applicable content index: mark the entry so UnindexedFilesFinder skips it.
          ((VirtualFileSystemEntry)file).setFileIndexed(true);
        }
      });
    }
  }
}

/**
 * Listens to VFS events and maintains the queue of files whose index data is stale
 * (to be refreshed lazily or by forceUpdate).
 */
private final class ChangedFilesCollector extends IndexedFilesListener {
  // fileId -> file pending re-indexing; a DeletedVirtualFileStub value means "wipe data for this id".
  private final IntObjectMap<VirtualFile> myFilesToUpdate = ContainerUtil.createConcurrentIntObjectMap();
  private final AtomicInteger myProcessedEventIndex = new AtomicInteger();
  // Tracks concurrently running event-processing workers; onAdvance returns false so the phaser never terminates.
  private final Phaser myWorkersFinishedSync = new Phaser() {
    @Override
    protected boolean onAdvance(int phase, int registeredParties) {
      return false;
    }
  };

  private final Executor myVfsEventsExecutor = SequentialTaskExecutor.createSequentialApplicationPoolExecutor("FileBasedIndex Vfs Event Processor");
  // 0 or 1: guards against scheduling more than one background VFS-event worker at a time.
  private final AtomicInteger myScheduledVfsEventsWorkers = new AtomicInteger();

  ChangedFilesCollector(@NotNull ManagingFS managingFS) {
    super(managingFS);
  }

  @Override
  protected void buildIndicesForFileRecursively(@NotNull VirtualFile file, boolean contentChange) {
    cleanProcessedFlag(file);
    if (!contentChange) {
      myUpdatingFiles.incrementAndGet();
    }

    super.buildIndicesForFileRecursively(file, contentChange);

    if (!contentChange) {
      // Bump the files modification counter once the last concurrent update finishes.
      if (myUpdatingFiles.decrementAndGet() == 0) {
        myFilesModCount.incrementAndGet();
      }
    }
  }

  @Override
  protected void iterateIndexableFiles(@NotNull VirtualFile file, @NotNull ContentIterator iterator) {
    for (IndexableFileSet set : myIndexableSets) {
      if (set.isInSet(file)) {
        set.iterateIndexableFilesIn(file, iterator);
      }
    }
  }

  /** Queues a file for lazy re-indexing; files outside every indexable set are ignored. */
  void scheduleForUpdate(VirtualFile file) {
    if (!(file instanceof DeletedVirtualFileStub)) {
      IndexableFileSet setForFile = getIndexableSetForFile(file);
      if (setForFile == null) {
        return;
      }
    }
    final int fileId = Math.abs(getIdMaskingNonIdBasedFile(file));
    final VirtualFile previousVirtualFile = myFilesToUpdate.put(fileId, file);

    if (previousVirtualFile instanceof DeletedVirtualFileStub &&
        !previousVirtualFile.equals(file)) {
      // The id was pending deletion and the file reappeared: keep the stub, marked resurrected.
      assert ((DeletedVirtualFileStub)previousVirtualFile).getOriginalFile().equals(file);
      ((DeletedVirtualFileStub)previousVirtualFile).setResurrected(true);
      myFilesToUpdate.put(fileId, previousVirtualFile);
    }
  }

  private void removeScheduledFileFromUpdate(VirtualFile file) {
    final int fileId = Math.abs(getIdMaskingNonIdBasedFile(file));
    final VirtualFile previousVirtualFile = myFilesToUpdate.remove(fileId);

    if (previousVirtualFile instanceof DeletedVirtualFileStub) {
      // Deletion stubs stay queued; only their resurrected flag is cleared.
      assert ((DeletedVirtualFileStub)previousVirtualFile).getOriginalFile().equals(file);
      ((DeletedVirtualFileStub)previousVirtualFile).setResurrected(false);
      myFilesToUpdate.put(fileId, previousVirtualFile);
    }
  }

  private void removeFileIdFromFilesScheduledForUpdate(int fileId) {
    myFilesToUpdate.remove(fileId);
  }

  /** Snapshot of all pending files, after draining any queued VFS events. */
  Collection<VirtualFile> getAllFilesToUpdate() {
    ensureUpToDate();
    if (myFilesToUpdate.isEmpty()) {
      return Collections.emptyList();
    }
    return new ArrayList<>(myFilesToUpdate.values());
  }

  @Override
  @NotNull
  public AsyncFileListener.ChangeApplier prepareChange(@NotNull List<? extends VFileEvent> events) {
    boolean shouldCleanup = ContainerUtil.exists(events, this::memoryStorageCleaningNeeded);
    ChangeApplier superApplier = super.prepareChange(events);

    return new AsyncFileListener.ChangeApplier() {
      @Override
      public void beforeVfsChange() {
        if (shouldCleanup) {
          cleanupMemoryStorage(false);
        }
        superApplier.beforeVfsChange();
      }

      @Override
      public void afterVfsChange() {
        superApplier.afterVfsChange();
        if (myInitialized) ensureUpToDateAsync();
      }
    };
  }

  // Events originating from document save / PSI / local history require dropping in-memory index storage.
  private boolean memoryStorageCleaningNeeded(VFileEvent event) {
    Object requestor = event.getRequestor();
    return requestor instanceof FileDocumentManager ||
           requestor instanceof PsiManager ||
           requestor == LocalHistory.VFS_EVENT_REQUESTOR;
  }

  boolean isScheduledForUpdate(VirtualFile file) {
    return myFilesToUpdate.containsKey(Math.abs(getIdMaskingNonIdBasedFile(file)));
  }

  /** Synchronously processes queued VFS events before index data is queried. */
  void ensureUpToDate() {
    if (!isUpToDateCheckEnabled()) {
      return;
    }
    //assert ApplicationManager.getApplication().isReadAccessAllowed() || ShutDownTracker.isShutdownHookRunning();
    waitUntilIndicesAreInitialized();

    if (ApplicationManager.getApplication().isReadAccessAllowed()) {
      processFilesInReadAction();
    }
    else {
      processFilesInReadActionWithYieldingToWriteAction();
    }
  }

  /**
   * Kicks off background processing of queued VFS events once enough (>= 20) have accumulated;
   * optionally also starts a dumb-mode re-indexing task (behind a registry flag).
   */
  void ensureUpToDateAsync() {
    if (getEventMerger().getApproximateChangesCount() >= 20 && myScheduledVfsEventsWorkers.compareAndSet(0,1)) {
      myVfsEventsExecutor.execute(this::scheduledEventProcessingInReadActionWithYieldingToWriteAction);

      if (Registry.is("try.starting.dumb.mode.where.many.files.changed")) {
        Runnable startDumbMode = () -> {
          for (Project project : ProjectManager.getInstance().getOpenProjects()) {
            DumbServiceImpl dumbService = DumbServiceImpl.getInstance(project);
            DumbModeTask task = FileBasedIndexProjectHandler.createChangedFilesIndexingTask(project);

            if (task
!= null) {
              dumbService.queueTask(task);
            }
          }
        };

        Application app = ApplicationManager.getApplication();

        if (!app.isHeadlessEnvironment() /*avoid synchronous ensureUpToDate to prevent deadlock*/ &&
            app.isDispatchThread() &&
            !LaterInvocator.isInModalContext()) {
          startDumbMode.run();
        }
        else {
          app.invokeLater(startDumbMode, ModalityState.NON_MODAL);
        }
      }
    }
  }

  /**
   * Applies all merged VFS events to the indices under the write lock.
   * Uses the phaser to wait for concurrently running workers before publishing the
   * processed-event watermark.
   */
  private void processFilesInReadAction() {
    assert ApplicationManager.getApplication().isReadAccessAllowed(); // no vfs events -> event processing code can finish

    int publishedEventIndex = getEventMerger().getPublishedEventIndex();
    int processedEventIndex = myProcessedEventIndex.get();
    if (processedEventIndex == publishedEventIndex) {
      return;
    }

    myWorkersFinishedSync.register();
    int phase = myWorkersFinishedSync.getPhase();
    try {
      getEventMerger().processChanges(info ->
        ConcurrencyUtil.withLock(myWriteLock, () -> {
          try {
            ProgressManager.getInstance().executeNonCancelableSection(() -> {
              int fileId = info.getFileId();
              VirtualFile file = info.getFile();
              // Dispatch every flag carried by the merged event info to its handler.
              if (info.isTransientStateChanged()) doTransientStateChangeForFile(fileId, file);
              if (info.isBeforeContentChanged()) FileBasedIndexImpl.this.doInvalidateIndicesForFile(fileId, file, true);
              if (info.isContentChanged()) scheduleFileForIndexing(fileId, file, true);
              if (info.isFileRemoved()) FileBasedIndexImpl.this.doInvalidateIndicesForFile(fileId, file, false);
              if (info.isFileAdded()) scheduleFileForIndexing(fileId, file, false);
            });
          }
          finally {
            IndexingStamp.flushCache(info.getFileId());
          }
          return true;
        })
      );
    }
    finally {
      myWorkersFinishedSync.arriveAndDeregister();
    }

    myWorkersFinishedSync.awaitAdvance(phase);

    // Only advance the watermark if no new events were published while we worked.
    if (getEventMerger().getPublishedEventIndex() == publishedEventIndex) {
      myProcessedEventIndex.compareAndSet(processedEventIndex, publishedEventIndex);
    }
  }

  /** Same as {@link #processFilesInReadAction} but repeatedly yields to pending write actions. */
  private void processFilesInReadActionWithYieldingToWriteAction() {
    while (getEventMerger().hasChanges()) {
      if (!ProgressIndicatorUtils.runInReadActionWithWriteActionPriority(this::processFilesInReadAction)) {
        ProgressIndicatorUtils.yieldToPendingWriteActions();
      }
    }
  }

  private void scheduledEventProcessingInReadActionWithYieldingToWriteAction() {
    try {
      processFilesInReadActionWithYieldingToWriteAction();
    }
    finally {
      myScheduledVfsEventsWorkers.decrementAndGet();
    }
  }
}

/**
 * Handles an index-state reset caused by an unsaved document for {@code file}.
 * Returns true if an unsaved document exists (it will be re-indexed via indexUnsavedDocuments);
 * otherwise clears transient data of PSI-dependent indices and returns false.
 */
private boolean clearUpToDateStateForPsiIndicesOfUnsavedDocuments(@NotNull VirtualFile file, Collection<? extends ID<?, ?>> affectedIndices) {
  if (!myUpToDateIndicesForUnsavedOrTransactedDocuments.isEmpty()) {
    myUpToDateIndicesForUnsavedOrTransactedDocuments.clear();
  }

  Document document = myFileDocumentManager.getCachedDocument(file);

  if (document != null && myFileDocumentManager.isDocumentUnsaved(document)) {   // will be reindexed in indexUnsavedDocuments
    myLastIndexedDocStamps.clearForDocument(document); // Q: non psi indices
    document.putUserData(ourFileContentKey, null);

    return true;
  }

  removeTransientFileDataFromIndices(ContainerUtil.intersection(affectedIndices, myPsiDependentIndices), getFileId(file), file);
  return false;
}

/** Returns the file's id, or {@link IndexingStamp#INVALID_FILE_ID} for files that have no id. */
static int getIdMaskingNonIdBasedFile(@NotNull VirtualFile file) {
  return file instanceof VirtualFileWithId ?((VirtualFileWithId)file).getId() : IndexingStamp.INVALID_FILE_ID;
}

/**
 * Content iterator that collects files which still need indexing; content-less indices are
 * updated on the spot during iteration.
 */
private class UnindexedFilesFinder implements CollectingContentIterator {
  private final List<VirtualFile> myFiles = new ArrayList<>();
  private final boolean myDoTraceForFilesToBeIndexed = LOG.isTraceEnabled();

  @NotNull
  @Override
  public List<VirtualFile> getFiles() {
    List<VirtualFile> files;
    synchronized (myFiles) {
      files = myFiles;
    }

    // When processing roots concurrently myFiles looses the local order of local vs archive files
    // If we process the roots in 2 threads we can just separate local vs archive
    // IMPORTANT: also remove duplicated file that can appear due to roots intersection
    BitSet usedFileIds = new BitSet(files.size());
    List<VirtualFile> localFileSystemFiles = new ArrayList<>(files.size() / 2);
    List<VirtualFile> archiveFiles = new ArrayList<>(files.size() / 2);

    for(VirtualFile file:files) {
      int fileId = ((VirtualFileWithId)file).getId();
      if (usedFileIds.get(fileId)) continue;
      usedFileIds.set(fileId);

      if (file.getFileSystem() instanceof LocalFileSystem) localFileSystemFiles.add(file);
      else archiveFiles.add(file);
    }

    localFileSystemFiles.addAll(archiveFiles);
    return localFileSystemFiles;
  }

  @Override
  public boolean processFile(@NotNull final VirtualFile file) {
    return ReadAction.compute(() -> {
      if (!file.isValid()) {
        return true;
      }
      // Already marked indexed: nothing to do.
      if (file instanceof VirtualFileSystemEntry && ((VirtualFileSystemEntry)file).isFileIndexed()) {
        return true;
      }

      if (!(file instanceof VirtualFileWithId)) {
        return true;
      }
      myFileTypeManager.freezeFileTypeTemporarilyIn(file, () -> {
        boolean isUptoDate = true;
        boolean isDirectory = file.isDirectory();
        if (!isDirectory && !isTooLarge(file)) {
          final List<ID<?, ?>> affectedIndexCandidates = getAffectedIndexCandidates(file);
          //noinspection ForLoopReplaceableByForEach
          for (int i = 0, size = affectedIndexCandidates.size(); i < size; ++i) {
            final ID<?, ?> indexId = affectedIndexCandidates.get(i);
            try {
              // The first stale content-requiring index is enough to queue the file.
              if (needsFileContentLoading(indexId) && shouldIndexFile(file, indexId)) {
                if (myDoTraceForFilesToBeIndexed) {
                  LOG.trace("Scheduling indexing of " + file + " by request of index " + indexId);
                }
                synchronized (myFiles) {
                  myFiles.add(file);
                }
                isUptoDate = false;
                break;
              }
            }
            catch (RuntimeException e) {
              final Throwable cause = e.getCause();
              if (cause instanceof IOException || cause instanceof StorageException) {
                LOG.info(e);
                requestRebuild(indexId);
              }
              else {
                throw e;
              }
            }
          }
        }
        // Content-less indices are cheap: update them right here during the scan.
        FileContent fileContent = null;
        int inputId = Math.abs(getIdMaskingNonIdBasedFile(file));
        for (ID<?, ?> indexId : isDirectory ? myIndicesForDirectories : myNotRequiringContentIndices) {
          if (shouldIndexFile(file, indexId)) {
            if (fileContent == null) {
              fileContent = new FileContentImpl(file);
            }
            updateSingleIndex(indexId, file, inputId, fileContent);
          }
        }
        IndexingStamp.flushCache(inputId);

        if (isUptoDate && file instanceof VirtualFileSystemEntry) {
          ((VirtualFileSystemEntry)file).setFileIndexed(true);
        }
      });

      ProgressManager.checkCanceled();
      return true;
    });
  }
}

/** True if the index accepts the file and its stored state for this file is stale (mock files always count). */
private boolean shouldIndexFile(@NotNull VirtualFile file, @NotNull ID<?, ?> indexId) {
  return getInputFilter(indexId).acceptInput(file) &&
         (isMock(file) || !getIndex(indexId).isIndexedStateForFile(((NewVirtualFile)file).getId(), file));
}

static boolean isMock(final VirtualFile file) {
  return !(file instanceof NewVirtualFile);
}

/** Size check against the intelligence limit; no-limit file types only obey the harder content-loading limit. */
private boolean isTooLarge(@NotNull VirtualFile file) {
  if (SingleRootFileViewProvider.isTooLargeForIntelligence(file)) {
    return !myNoLimitCheckTypes.contains(file.getFileType()) || SingleRootFileViewProvider.isTooLargeForContentLoading(file);
  }
  return false;
}

/** Same as {@link #isTooLarge(VirtualFile)} but with an explicit content size. */
private boolean isTooLarge(@NotNull VirtualFile file, long contentSize) {
  if (SingleRootFileViewProvider.isTooLargeForIntelligence(file, contentSize)) {
    return !myNoLimitCheckTypes.contains(file.getFileType()) || SingleRootFileViewProvider.isTooLargeForContentLoading(file, contentSize);
  }
  return false;
}

@NotNull
CollectingContentIterator createContentIterator(@Nullable ProgressIndicator indicator) {
  return new UnindexedFilesFinder();
}

@Override
public void registerIndexableSet(@NotNull IndexableFileSet set, @Nullable Project project) {
  myIndexableSets.add(set);
  myIndexableSetToProjectMap.put(set, project);
  if (project != null) {
    // Generic PSI children-changed events become transient-state-change events for the file's indices.
    ((PsiManagerImpl)PsiManager.getInstance(project)).addTreeChangePreprocessor(event -> {
      if (event.isGenericChange() &&
          event.getCode() == PsiTreeChangeEventImpl.PsiEventType.CHILDREN_CHANGED) {
        PsiFile file = event.getFile();

        if (file != null) {
          VirtualFile virtualFile = file.getVirtualFile();

          if (virtualFile instanceof
final VirtualFile file) {
    if (!(file instanceof VirtualFileSystemEntry)) return;

    final VirtualFileSystemEntry nvf = (VirtualFileSystemEntry)file;
    if (file.isDirectory()) {
      // Clear the "indexed" flag recursively for all cached children.
      nvf.setFileIndexed(false);
      for (VirtualFile child : nvf.getCachedChildren()) {
        cleanProcessedFlag(child);
      }
    }
    else {
      nvf.setFileIndexed(false);
    }
  }

  @Override
  public void iterateIndexableFilesConcurrently(@NotNull ContentIterator processor, @NotNull Project project, @NotNull ProgressIndicator indicator) {
    PushedFilePropertiesUpdaterImpl.invokeConcurrentlyIfPossible(collectScanRootRunnables(processor, project, indicator));
  }

  @Override
  public void iterateIndexableFiles(@NotNull final ContentIterator processor, @NotNull final Project project, final ProgressIndicator indicator) {
    // Sequential variant: run the same root-scanning runnables one by one.
    for(Runnable r: collectScanRootRunnables(processor, project, indicator)) r.run();
  }

  @NotNull
  private static List<Runnable> collectScanRootRunnables(@NotNull final ContentIterator processor,
                                                         @NotNull final Project project,
                                                         final ProgressIndicator indicator) {
    FileBasedIndexScanRunnableCollector collector = FileBasedIndexScanRunnableCollector.getInstance(project);
    return collector.collectScanRootRunnables(processor, indicator);
  }

  /** UpdateTask that re-indexes an unsaved document for a single index id. */
  private final class DocumentUpdateTask extends UpdateTask<Document> {
    private final ID<?, ?> myIndexId;

    DocumentUpdateTask(ID<?, ?> indexId) {
      myIndexId = indexId;
    }

    @Override
    void doProcess(Document document, Project project) {
      indexUnsavedDocument(document, myIndexId, project, myFileDocumentManager.getFile(document));
    }
  }

  /**
   * Asynchronous initialization of all registered index extensions: registers indexers,
   * handles corruption/version changes, and installs the periodic flushing daemon.
   */
  private class FileIndexDataInitialization extends IndexInfrastructure.DataInitialization<IndexConfiguration> {
    private final IndexConfiguration state = new IndexConfiguration();
    // Indices whose on-disk version differs from the extension's current version.
    private final Set<ID> versionChangedIndexes = ContainerUtil.newConcurrentSet();
    private boolean currentVersionCorrupted;
    private SerializationManagerEx mySerializationManagerEx;

    // Walks all FileBasedIndexExtension EPs, classifies them (content vs content-less, directory,
    // PSI-dependent) and queues per-extension registration tasks.
    private void initAssociatedDataForExtensions() {
      long started = System.nanoTime();
      Iterator<FileBasedIndexExtension> extensions =
        IndexInfrastructure.hasIndices() ?
        ((ExtensionPointImpl<FileBasedIndexExtension>)FileBasedIndexExtension.EXTENSION_POINT_NAME.getPoint(null)).iterator() :
        Collections.emptyIterator();

      // todo: init contentless indices first ?
      while (extensions.hasNext()) {
        FileBasedIndexExtension<?, ?> extension = extensions.next();
        if (extension == null) break;
        ID<?, ?> name = extension.getName();
        RebuildStatus.registerIndex(name);

        myUnsavedDataUpdateTasks.put(name, new DocumentUpdateTask(name));

        if (!extension.dependsOnFileContent()) {
          if (extension.indexDirectories()) myIndicesForDirectories.add(name);
          myNotRequiringContentIndices.add(name);
        }
        else {
          myRequiringContentIndices.add(name);
        }

        if (isPsiDependentIndex(extension)) myPsiDependentIndices.add(name);
        myNoLimitCheckTypes.addAll(extension.getFileTypesWithSizeLimitNotApplicable());

        addNestedInitializationTask(() -> {
          try {
            if (registerIndexer(extension, state)) {
              versionChangedIndexes.add(extension.getName());
            }
          }
          catch (IOException io) {
            throw io;
          }
          catch (Throwable t) {
            // A broken extension must not abort initialization of the remaining indices.
            PluginManager.handleComponentError(t, extension.getClass().getName(), null);
          }
        });
      }

      myExtensionsRelatedDataWasLoaded = true;
      LOG.info("File index extensions iterated:" + (System.nanoTime() - started) / 1000000);
    }

    @Override
    protected void prepare() {
      initAssociatedDataForExtensions();

      mySerializationManagerEx = SerializationManagerEx.getInstanceEx();
      File indexRoot = PathManager.getIndexRoot();

      PersistentIndicesConfiguration.loadConfiguration();

      final File corruptionMarker = new File(indexRoot, CORRUPTION_MARKER_NAME);
      currentVersionCorrupted = IndexInfrastructure.hasIndices() && corruptionMarker.exists();
      if (currentVersionCorrupted) {
        // Wipe the whole index root and recreate supporting storages from scratch.
        FileUtil.deleteWithRenaming(indexRoot);
        indexRoot.mkdirs();
        // serialization manager is initialized before and use removed index root so we need to reinitialize it
        mySerializationManagerEx.reinitializeNameStorage();
        ID.reinitializeDiskStorage();
        PersistentIndicesConfiguration.saveConfiguration();
        FileUtil.delete(corruptionMarker);
      }
    }

    @Override
    protected void onThrowable(@NotNull Throwable t) {
      LOG.error(t);
    }

    @Override
    protected IndexConfiguration finish() {
      try {
        state.finalizeFileTypeMappingForIndices();

        String rebuildNotification = null;
        if (currentVersionCorrupted) {
          rebuildNotification = "Index files on disk are corrupted. Indices will be rebuilt.";
        }
        else if (!versionChangedIndexes.isEmpty()) {
          String changedIndexesText = versionChangedIndexes.stream().map(id -> id.getName()).collect(Collectors.joining(", "));
          rebuildNotification = "Index file format has changed for " + changedIndexesText + " indices. These indices will be rebuilt.";
        }
        if (rebuildNotification != null &&
            !ApplicationManager.getApplication().isHeadlessEnvironment() &&
            Registry.is("ide.showIndexRebuildMessage")) {
          NOTIFICATIONS.createNotification("Index Rebuild", rebuildNotification, NotificationType.INFORMATION, null).notify(null);
        }

        state.freeze();
        myState = state; // memory barrier
        // check if rebuild was requested for any index during registration
        for (ID<?, ?> indexId : state.getIndexIDs()) {
          try {
            RebuildStatus.clearIndexIfNecessary(indexId, () -> clearIndex(indexId));
          }
          catch (StorageException e) {
            requestRebuild(indexId);
            LOG.error(e);
          }
        }

        registerIndexableSet(new AdditionalIndexableFileSet(), null);
        return state;
      }
      finally {
        // Shutdown hook, persisted registry of index ids, and the periodic flusher are installed
        // even if finalization above failed.
        ShutDownTracker.getInstance().registerShutdownTask(FileBasedIndexImpl.this::performShutdown);
        saveRegisteredIndicesAndDropUnregisteredOnes(state.getIndexIDs());

        myFlushingFuture = FlushingDaemon.everyFiveSeconds(new Runnable() {
          private int lastModCount;

          @Override
          public void run() {
            mySerializationManagerEx.flushNameStorage();

            // Flush indices only when no modifications happened since the previous tick.
            int currentModCount = myLocalModCount.get();
            if (lastModCount == currentModCount) {
              flushAllIndices(lastModCount);
            }
            lastModCount = currentModCount;
          }
        });
        myAllIndicesInitializedFuture = IndexInfrastructure.submitGenesisTask(() -> {
          if (!myShutdownPerformed.get()) {
            myChangedFilesCollector.ensureUpToDateAsync();
          }
          return null;
        });
        myInitialized = true; // this will ensure that all changes to component's state will be visible to other threads
      }
    }
  }

  @Override
  public void invalidateCaches() {
    File indexRoot = PathManager.getIndexRoot();
    LOG.info("Requesting explicit indices invalidation", new Throwable());
    try {
      // Creating the marker file is enough: prepare() detects it on next start and wipes the indices.
      final File corruptionMarker = new File(indexRoot, CORRUPTION_MARKER_NAME);
      //noinspection IOResourceOpenedButNotSafelyClosed
      new FileOutputStream(corruptionMarker).close();
    }
    catch (Throwable ignore) {
    }
  }

  @TestOnly
  public void waitForVfsEventsExecuted(long timeout, @NotNull TimeUnit unit) throws Exception {
    ApplicationManager.getApplication().assertIsDispatchThread();
    long deadline = System.nanoTime() + unit.toNanos(timeout);
    while (System.nanoTime() < deadline) {
      try {
        ((BoundedTaskExecutor)myChangedFilesCollector.myVfsEventsExecutor).waitAllTasksExecuted(100, TimeUnit.MILLISECONDS);
        return;
      }
      catch (TimeoutException e) {
        // Keep pumping EDT events so the executor's tasks can make progress.
        UIUtil.dispatchAllInvocationEvents();
      }
    }
  }

  /** Lazily registers and returns the content-hash index backed by the given enumerator path. */
  public synchronized FileContentHashIndex getFileContentHashIndex(@NotNull File enumeratorPath) {
    UpdatableIndex<Integer, Void, FileContent> index = getState().getIndex(FileContentHashIndexExtension.HASH_INDEX_ID);
    if (index == null) {
      try {
        registerIndexer(FileContentHashIndexExtension.create(enumeratorPath, this), myState);
      }
      catch (IOException e) {
        throw new RuntimeException(e);
      }
    } else return (FileContentHashIndex)index;
    return (FileContentHashIndex)getState().getIndex(FileContentHashIndexExtension.HASH_INDEX_ID);
  }

  private static final boolean INDICES_ARE_PSI_DEPENDENT_BY_DEFAULT = SystemProperties.getBooleanProperty("idea.indices.psi.dependent.default", true);

  /**
   * Decides whether an index depends on PSI. By default every content-dependent index counts
   * (unless it opts out via DocumentChangeDependentIndex); with the system property off, only
   * explicit PsiDependentIndex implementations do.
   */
  static boolean isPsiDependentIndex(@NotNull IndexExtension<?, ?, ?> extension) {
    if (INDICES_ARE_PSI_DEPENDENT_BY_DEFAULT) {
      return extension instanceof FileBasedIndexExtension &&
             ((FileBasedIndexExtension<?, ?>)extension).dependsOnFileContent() &&
             !(extension instanceof DocumentChangeDependentIndex);
    }
    else {
      return extension instanceof PsiDependentIndex;
    }
  }
}
platform/lang-impl/src/com/intellij/util/indexing/FileBasedIndexImpl.java
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.util.indexing; import com.google.common.annotations.VisibleForTesting; import com.intellij.AppTopics; import com.intellij.history.LocalHistory; import com.intellij.ide.plugins.PluginManager; import com.intellij.ide.startup.ServiceNotReadyException; import com.intellij.lang.ASTNode; import com.intellij.notification.NotificationDisplayType; import com.intellij.notification.NotificationGroup; import com.intellij.notification.NotificationType; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.ex.ActionUtil; import com.intellij.openapi.application.*; import com.intellij.openapi.application.impl.LaterInvocator; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.impl.EditorHighlighterCache; import com.intellij.openapi.extensions.impl.ExtensionPointImpl; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.FileDocumentManagerListener; import com.intellij.openapi.fileTypes.*; import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.impl.BackgroundableProcessIndicator; import com.intellij.openapi.progress.util.ProgressIndicatorUtils; import com.intellij.openapi.project.*; import com.intellij.openapi.roots.CollectingContentIterator; import com.intellij.openapi.roots.ContentIterator; import com.intellij.openapi.roots.impl.PushedFilePropertiesUpdaterImpl; import com.intellij.openapi.util.*; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import 
com.intellij.openapi.vfs.*; import com.intellij.openapi.vfs.newvfs.ManagingFS; import com.intellij.openapi.vfs.newvfs.NewVirtualFile; import com.intellij.openapi.vfs.newvfs.events.VFileEvent; import com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry; import com.intellij.openapi.vfs.newvfs.persistent.FlushingDaemon; import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.SingleRootFileViewProvider; import com.intellij.psi.impl.PsiDocumentTransactionListener; import com.intellij.psi.impl.PsiManagerImpl; import com.intellij.psi.impl.PsiTreeChangeEventImpl; import com.intellij.psi.impl.cache.impl.id.IdIndex; import com.intellij.psi.impl.cache.impl.id.PlatformIdTableBuilding; import com.intellij.psi.impl.source.PsiFileImpl; import com.intellij.psi.search.EverythingGlobalScope; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.stubs.SerializationManagerEx; import com.intellij.util.*; import com.intellij.util.concurrency.BoundedTaskExecutor; import com.intellij.util.concurrency.SequentialTaskExecutor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.IntObjectMap; import com.intellij.util.gist.GistManager; import com.intellij.util.indexing.hash.FileContentHashIndex; import com.intellij.util.indexing.hash.FileContentHashIndexExtension; import com.intellij.util.indexing.impl.InvertedIndexValueIterator; import com.intellij.util.indexing.provided.ProvidedIndexExtension; import com.intellij.util.indexing.provided.ProvidedIndexExtensionLocator; import com.intellij.util.io.DataOutputStream; import com.intellij.util.io.IOUtil; import com.intellij.util.io.storage.HeavyProcessLatch; import com.intellij.util.messages.MessageBus; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.UIUtil; import gnu.trove.THashMap; import gnu.trove.THashSet; 
import gnu.trove.TIntArrayList;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.io.*;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.nio.charset.Charset;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Default implementation of {@code FileBasedIndex}: maintains persistent per-file indices,
 * keeps them in sync with VFS/document/file-type changes via message-bus subscriptions,
 * and answers index queries after bringing the relevant indices up to date.
 *
 * @author Eugene Zhuravlev
 */
public final class FileBasedIndexImpl extends FileBasedIndex implements Disposable {
  // NOTE(review): per-thread slot; presumably holds the file currently being indexed — usage not visible in this chunk.
  private static final ThreadLocal<VirtualFile> ourIndexedFile = new ThreadLocal<>();
  static final Logger LOG = Logger.getInstance("#com.intellij.util.indexing.FileBasedIndexImpl");
  // NOTE(review): name of an on-disk marker file; presumably signals that indices are corrupted — check usages elsewhere in this class.
  private static final String CORRUPTION_MARKER_NAME = "corruption.marker";
  private static final NotificationGroup NOTIFICATIONS = new NotificationGroup("Indexing", NotificationDisplayType.BALLOON, false);

  // Indices that can accept directories (not only regular files) as input.
  private final List<ID<?, ?>> myIndicesForDirectories = new SmartList<>();

  // Pending per-index tasks for re-indexing unsaved document content.
  private final Map<ID<?, ?>, DocumentUpdateTask> myUnsavedDataUpdateTasks = new ConcurrentHashMap<>();

  // Partition of the registered indices by whether building them requires loading file content.
  private final Set<ID<?, ?>> myNotRequiringContentIndices = new THashSet<>();
  private final Set<ID<?, ?>> myRequiringContentIndices = new THashSet<>();
  // Indices whose input depends on the PSI tree rather than on raw file bytes.
  private final Set<ID<?, ?>> myPsiDependentIndices = new THashSet<>();
  // File types exempt from the "file too large to index" size limit.
  private final Set<FileType> myNoLimitCheckTypes = new THashSet<>();

  private volatile boolean myExtensionsRelatedDataWasLoaded;

  // Tracks, per document and per index, the document modification stamp that was last indexed.
  private final PerIndexDocumentVersionMap myLastIndexedDocStamps = new PerIndexDocumentVersionMap();
  @NotNull
  private final ChangedFilesCollector
myChangedFilesCollector; // async VFS listener that accumulates files needing (re)indexing
  private final List<IndexableFileSet> myIndexableSets = ContainerUtil.createLockFreeCopyOnWriteList();
  private final Map<IndexableFileSet, Project> myIndexableSetToProjectMap = new THashMap<>();

  private final MessageBusConnection myConnection;
  private final FileDocumentManager myFileDocumentManager;
  private final FileTypeManagerImpl myFileTypeManager;
  // Indices already refreshed against unsaved/transacted documents; cleared whenever documents change.
  private final Set<ID<?, ?>> myUpToDateIndicesForUnsavedOrTransactedDocuments = ContainerUtil.newConcurrentSet();
  // Immutable snapshot map of documents currently inside a PSI transaction (copy-on-write via plus/minus).
  private volatile SmartFMap<Document, PsiFile> myTransactionMap = SmartFMap.emptyMap();

  private final boolean myIsUnitTestMode;
  @Nullable private ScheduledFuture<?> myFlushingFuture;
  private final AtomicInteger myLocalModCount = new AtomicInteger();
  private final AtomicInteger myFilesModCount = new AtomicInteger();
  private final AtomicInteger myUpdatingFiles = new AtomicInteger();
  private final Set<Project> myProjectsBeingUpdated = ContainerUtil.newConcurrentSet();
  private final IndexAccessValidator myAccessValidator = new IndexAccessValidator();
  private volatile boolean myInitialized;

  // Index configuration is built asynchronously (see initComponent); myState caches the future's result.
  private Future<IndexConfiguration> myStateFuture;
  private volatile IndexConfiguration myState;
  private volatile Future<?> myAllIndicesInitializedFuture;

  /**
   * Returns the index configuration, blocking on the asynchronous initialization future
   * on first access. Logs an error (rather than throwing) if called before initialization
   * was even started.
   */
  private IndexConfiguration getState() {
    if (!myInitialized) {
      //throw new IndexNotReadyException();
      LOG.error("Unexpected initialization problem");
    }

    IndexConfiguration state = myState; // memory barrier
    if (state == null) {
      try {
        // Cache the future's result; any initialization failure is rethrown as unchecked.
        myState = state = myStateFuture.get();
      }
      catch (Throwable t) {
        throw new RuntimeException(t);
      }
    }
    return state;
  }

  /**
   * Wires the index machinery into the platform: subscribes to PSI document transactions,
   * file-type changes and document sync events, registers the VFS changed-files collector,
   * and kicks off asynchronous index initialization.
   */
  public FileBasedIndexImpl(Application application,
                            VirtualFileManager vfManager,
                            FileDocumentManager fdm,
                            FileTypeManagerImpl fileTypeManager,
                            @NotNull MessageBus bus,
                            ManagingFS managingFS) {
    myFileDocumentManager = fdm;
    myFileTypeManager = fileTypeManager;
    myIsUnitTestMode = application.isUnitTestMode();

    final MessageBusConnection connection = bus.connect();
connection.subscribe(PsiDocumentTransactionListener.TOPIC, new PsiDocumentTransactionListener() {
      @Override
      public void transactionStarted(@NotNull final Document doc, @NotNull final PsiFile file) {
        // Track the document for the duration of the PSI transaction and invalidate the
        // "already up to date for unsaved docs" cache, since the transaction may change content.
        myTransactionMap = myTransactionMap.plus(doc, file);
        myUpToDateIndicesForUnsavedOrTransactedDocuments.clear();
      }

      @Override
      public void transactionCompleted(@NotNull final Document doc, @NotNull final PsiFile file) {
        myTransactionMap = myTransactionMap.minus(doc);
      }
    });

    connection.subscribe(FileTypeManager.TOPIC, new FileTypeListener() {
      // Snapshot of file type -> associated extensions taken before a file-type change,
      // compared against the state afterwards to decide whether indices must be rebuilt.
      @Nullable private Map<FileType, Set<String>> myTypeToExtensionMap;

      @Override
      public void beforeFileTypesChanged(@NotNull final FileTypeEvent event) {
        cleanupProcessedFlag();
        myTypeToExtensionMap = new THashMap<>();
        for (FileType type : myFileTypeManager.getRegisteredFileTypes()) {
          myTypeToExtensionMap.put(type, getExtensions(type));
        }
      }

      @Override
      public void fileTypesChanged(@NotNull final FileTypeEvent event) {
        final Map<FileType, Set<String>> oldTypeToExtensionsMap = myTypeToExtensionMap;
        myTypeToExtensionMap = null;
        if (oldTypeToExtensionsMap != null) {
          final Map<FileType, Set<String>> newTypeToExtensionsMap = new THashMap<>();
          for (FileType type : myFileTypeManager.getRegisteredFileTypes()) {
            newTypeToExtensionsMap.put(type, getExtensions(type));
          }
          // we are interested only in extension changes or removals.
// addition of an extension is handled separately by RootsChanged event if (!newTypeToExtensionsMap.keySet().containsAll(oldTypeToExtensionsMap.keySet())) { Set<FileType> removedFileTypes = new HashSet<>(oldTypeToExtensionsMap.keySet()); removedFileTypes.removeAll(newTypeToExtensionsMap.keySet()); rebuildAllIndices("The following file types were removed/are no longer associated: " + removedFileTypes); return; } for (Map.Entry<FileType, Set<String>> entry : oldTypeToExtensionsMap.entrySet()) { FileType fileType = entry.getKey(); Set<String> strings = entry.getValue(); if (!newTypeToExtensionsMap.get(fileType).containsAll(strings)) { Set<String> removedExtensions = new HashSet<>(strings); removedExtensions.removeAll(newTypeToExtensionsMap.get(fileType)); rebuildAllIndices(fileType.getName() + " is no longer associated with extension(s) " + String.join(",", removedExtensions)); return; } } } } @NotNull private Set<String> getExtensions(@NotNull FileType type) { final Set<String> set = new THashSet<>(); for (FileNameMatcher matcher : myFileTypeManager.getAssociations(type)) { set.add(matcher.getPresentableString()); } return set; } private void rebuildAllIndices(@NotNull String reason) { doClearIndices(); scheduleIndexRebuild("File type change" + ", " + reason); } }); connection.subscribe(AppTopics.FILE_DOCUMENT_SYNC, new FileDocumentManagerListener() { @Override public void fileContentReloaded(@NotNull VirtualFile file, @NotNull Document document) { cleanupMemoryStorage(true); } @Override public void unsavedDocumentsDropped() { cleanupMemoryStorage(false); } }); application.addApplicationListener(new ApplicationListener() { @Override public void writeActionStarted(@NotNull Object action) { myUpToDateIndicesForUnsavedOrTransactedDocuments.clear(); } }, this); myChangedFilesCollector = new ChangedFilesCollector(managingFS); myConnection = connection; vfManager.addAsyncFileListener(myChangedFilesCollector, this); initComponent(); } @VisibleForTesting void 
doClearIndices() {
    // Clears every registered index; waits for initialization first so the index set is known.
    waitUntilIndicesAreInitialized();
    IndexingStamp.flushCaches();
    for (ID<?, ?> indexId : getState().getIndexIDs()) {
      try {
        clearIndex(indexId);
      }
      catch (StorageException e) {
        // Best-effort: a failure to clear one index must not prevent clearing the rest.
        LOG.info(e);
      }
    }
  }

  /**
   * Feeds every file currently known to need (re)indexing and relevant to {@code project}
   * to {@code processor}. Returns false as soon as the processor does.
   */
  boolean processChangedFiles(@NotNull Project project, @NotNull Processor<? super VirtualFile> processor) {
    // avoid missing files when events are processed concurrently
    return Stream.concat(myChangedFilesCollector.getEventMerger().getChangedFiles(),
                         myChangedFilesCollector.myFilesToUpdate.values().stream())
      .filter(filesToBeIndexedForProjectCondition(project))
      .distinct()
      .mapToInt(f -> processor.process(f) ? 1 : 0)
      .allMatch(success -> success == 1);
  }

  public static boolean isProjectOrWorkspaceFile(@NotNull VirtualFile file, @Nullable FileType fileType) {
    return ProjectCoreUtil.isProjectOrWorkspaceFile(file, fileType);
  }

  /**
   * True if {@code file} is an id-backed, valid file that matches the given restriction:
   * either equal to {@code restrictedTo} (when set) or accepted by {@code filter} (when set).
   */
  static boolean belongsToScope(VirtualFile file, VirtualFile restrictedTo, GlobalSearchScope filter) {
    if (!(file instanceof VirtualFileWithId) || !file.isValid()) {
      return false;
    }

    return (restrictedTo == null || Comparing.equal(file, restrictedTo)) &&
           (filter == null || restrictedTo != null || filter.accept(file));
  }

  @Override
  public void requestReindex(@NotNull final VirtualFile file) {
    GistManager.getInstance().invalidateData();
    // todo: this is the same vfs event handling sequence that is produces after events of FileContentUtilCore.reparseFiles
    // but it is more costly than current code, see IDEA-192192
    //myChangedFilesCollector.invalidateIndicesRecursively(file, false);
    //myChangedFilesCollector.buildIndicesForFileRecursively(file, false);
    myChangedFilesCollector.invalidateIndicesRecursively(file, true, myChangedFilesCollector.getEventMerger());
    if (myInitialized) myChangedFilesCollector.ensureUpToDateAsync();
  }

  private void initComponent() {
    // Index configuration is built on a background "genesis" task; callers block on it via getState().
    myStateFuture = IndexInfrastructure.submitGenesisTask(new FileIndexDataInitialization());

    if (!IndexInfrastructure.ourDoAsyncIndicesInitialization) {
      waitUntilIndicesAreInitialized();
    }
  }

  private
void waitUntilIndicesAreInitialized() {
    // Blocks until the asynchronous index-configuration initialization finishes; failures are only logged.
    try {
      myStateFuture.get();
    }
    catch (Throwable t) {
      LOG.error(t);
    }
  }

  /**
   * Registers a single index extension: detects version changes, wipes stale on-disk data
   * when the version differs, and initializes the index storage.
   *
   * @return true if registered index requires full rebuild for some reason, e.g. is just created or corrupted
   */
  private static <K, V> boolean registerIndexer(@NotNull final FileBasedIndexExtension<K, V> extension,
                                                @NotNull IndexConfiguration state) throws IOException {
    final ID<K, V> name = extension.getName();
    final int version = extension.getVersion();

    final File versionFile = IndexInfrastructure.getVersionFile(name);

    boolean versionChanged = false;
    if (IndexingStamp.versionDiffers(name, version)) {
      final boolean versionFileExisted = versionFile.exists();

      if (versionFileExisted) {
        // An existing version file with a different version means stale data: rebuild.
        versionChanged = true;
        LOG.info("Version has changed for index " + name + ". The index will be rebuilt.");
      }
      else {
        LOG.debug("Index " + name + " will be built.");
      }

      if (extension.hasSnapshotMapping() && versionChanged) {
        FileUtil.deleteWithRenaming(IndexInfrastructure.getPersistentIndexRootDir(name));
      }
      File rootDir = IndexInfrastructure.getIndexRootDir(name);
      if (versionFileExisted) FileUtil.deleteWithRenaming(rootDir);
      IndexingStamp.rewriteVersion(name, version);
    }

    initIndexStorage(extension, version, state);

    return versionChanged;
  }

  /**
   * Creates the on-disk storage for an index and registers it in {@code state}.
   * On failure the storage is deleted and a second (and last) attempt is made —
   * see the retry loop below.
   */
  private static <K, V> void initIndexStorage(@NotNull FileBasedIndexExtension<K, V> extension,
                                              int version,
                                              @NotNull IndexConfiguration state) throws IOException {
    VfsAwareMapIndexStorage<K, V> storage = null;
    final ID<K, V> name = extension.getName();
    boolean contentHashesEnumeratorOk = false;

    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        if (extension.hasSnapshotMapping()) {
          ContentHashesSupport.initContentHashesEnumerator();
          contentHashesEnumeratorOk = true;
        }
        storage = new VfsAwareMapIndexStorage<>(
          IndexInfrastructure.getStorageFile(name),
          extension.getKeyDescriptor(),
          extension.getValueExternalizer(),
          extension.getCacheSize(),
          extension.keyIsUniqueForIndexedFile(),
          extension.traceKeyHashToVirtualFileMapping()
        );

        final InputFilter
inputFilter = extension.getInputFilter();
        final Set<FileType> addedTypes;
        if (inputFilter instanceof FileBasedIndex.FileTypeSpecificInputFilter) {
          // Collect the file types this filter declares so the registration can record them.
          addedTypes = new THashSet<>();
          ((FileBasedIndex.FileTypeSpecificInputFilter)inputFilter).registerFileTypesUsedForIndexing(type -> {
            if (type != null) addedTypes.add(type);
          });
        }
        else {
          addedTypes = null;
        }

        UpdatableIndex<K, V, FileContent> index = createIndex(extension, new MemoryIndexStorage<>(storage, name));

        // If a pre-built ("provided") index exists for this extension, layer it on top.
        ProvidedIndexExtension<K, V> providedExtension = ProvidedIndexExtensionLocator.findProvidedIndexExtensionFor(extension);
        if (providedExtension != null) {
          index = ProvidedIndexExtension.wrapWithProvidedIndex(providedExtension, extension, index);
        }

        state.registerIndex(name,
                            index,
                            file -> file instanceof VirtualFileWithId && inputFilter.acceptInput(file) &&
                                    !GlobalIndexFilter.isExcludedFromIndexViaFilters(file, name),
                            version + GlobalIndexFilter.getFiltersVersion(name),
                            addedTypes);
        break; // success — no second attempt needed
      }
      catch (Exception e) {
        // Recovery path: drop the (possibly corrupted) storage and retry once from scratch.
        LOG.info(e);
        boolean instantiatedStorage = storage != null;
        try {
          if (storage != null) storage.close();
          storage = null;
        }
        catch (Exception ignored) {
        }

        FileUtil.deleteWithRenaming(IndexInfrastructure.getIndexRootDir(name));

        if (extension.hasSnapshotMapping() && (!contentHashesEnumeratorOk || instantiatedStorage)) {
          // todo there is possibility of corruption of storage and content hashes
          FileUtil.deleteWithRenaming(IndexInfrastructure.getPersistentIndexRootDir(name));
        }
        IndexingStamp.rewriteVersion(name, version);
      }
    }
  }

  /**
   * Persists the set of registered index names into {@code <index root>/registered} and
   * deletes the on-disk data of any index that was present last run but is no longer registered.
   */
  private static void saveRegisteredIndicesAndDropUnregisteredOnes(@NotNull Collection<?
extends ID<?, ?>> ids) {
    if (ApplicationManager.getApplication().isDisposed() || !IndexInfrastructure.hasIndices()) {
      return;
    }
    final File registeredIndicesFile = new File(PathManager.getIndexRoot(), "registered");
    final Set<String> indicesToDrop = new THashSet<>();
    // Read the names persisted on the previous run; a missing/unreadable file simply yields an empty set.
    try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(registeredIndicesFile)))) {
      final int size = in.readInt();
      for (int idx = 0; idx < size; idx++) {
        indicesToDrop.add(IOUtil.readString(in));
      }
    }
    catch (IOException ignored) {
    }
    // Whatever is still registered survives; the remainder is stale and its data is removed.
    for (ID<?, ?> key : ids) {
      indicesToDrop.remove(key.getName());
    }
    if (!indicesToDrop.isEmpty()) {
      LOG.info("Dropping indices:" + StringUtil.join(indicesToDrop, ","));
      for (String s : indicesToDrop) {
        FileUtil.deleteWithRenaming(IndexInfrastructure.getIndexRootDir(ID.create(s)));
      }
    }

    // Rewrite the registry file with the current set of index names.
    FileUtil.createIfDoesntExist(registeredIndicesFile);
    try (DataOutputStream os = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(registeredIndicesFile)))) {
      os.writeInt(ids.size());
      for (ID<?, ?> id : ids) {
        IOUtil.writeString(id.getName(), os);
      }
    }
    catch (IOException ignored) {
    }
  }

  /**
   * Instantiates the index implementation: a custom one when the extension provides it,
   * otherwise the standard VFS-aware map/reduce index.
   */
  @NotNull
  private static <K, V> UpdatableIndex<K, V, FileContent> createIndex(@NotNull final FileBasedIndexExtension<K, V> extension,
                                                                      @NotNull final MemoryIndexStorage<K, V> storage)
    throws StorageException, IOException {
    return extension instanceof CustomImplementationFileBasedIndexExtension
           ?
((CustomImplementationFileBasedIndexExtension<K, V>)extension).createIndexImplementation(extension, storage)
           : new VfsAwareMapReduceIndex<>(extension, storage);
  }

  @Override
  public void dispose() {
    performShutdown();
  }

  // Guards against running the shutdown sequence more than once.
  private final AtomicBoolean myShutdownPerformed = new AtomicBoolean(false);

  /**
   * One-shot shutdown: cancels periodic flushing, drops data for deleted files,
   * flushes stamps, disposes every index (clearing those scheduled for rebuild),
   * flushes shared data and disconnects from the message bus.
   */
  private void performShutdown() {
    if (!myShutdownPerformed.compareAndSet(false, true)) {
      return; // already shut down
    }

    waitUntilAllIndicesAreInitialized();
    try {
      if (myFlushingFuture != null) {
        myFlushingFuture.cancel(false);
        myFlushingFuture = null;
      }
    }
    finally {
      LOG.info("START INDEX SHUTDOWN");
      try {
        PersistentIndicesConfiguration.saveConfiguration();

        // Files that became invalid while updates were pending must have their index data purged.
        for (VirtualFile file : myChangedFilesCollector.getAllFilesToUpdate()) {
          if (!file.isValid()) {
            removeDataFromIndicesForFile(Math.abs(getIdMaskingNonIdBasedFile(file)), file);
          }
        }
        IndexingStamp.flushCaches();

        IndexConfiguration state = getState();
        for (ID<?, ?> indexId : state.getIndexIDs()) {
          try {
            final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
            assert index != null;
            if (!RebuildStatus.isOk(indexId)) {
              index.clear(); // if the index was scheduled for rebuild, only clean it
            }
            index.dispose();
          }
          catch (Throwable throwable) {
            LOG.info("Problem disposing " + indexId, throwable);
          }
        }

        ContentHashesSupport.flushContentHashes();
        SharedIndicesData.flushData();
        myConnection.disconnect();
      }
      catch (Throwable e) {
        LOG.error("Problems during index shutdown", e);
      }
      LOG.info("END INDEX SHUTDOWN");
    }
  }

  private void waitUntilAllIndicesAreInitialized() {
    try {
      waitUntilIndicesAreInitialized();
      myAllIndicesInitializedFuture.get();
    }
    catch (Throwable ignore) {}
  }

  /**
   * Removes all persisted index data associated with {@code fileId}. For deleted-file stubs
   * the original file is used for the transient-data cleanup.
   */
  private void removeDataFromIndicesForFile(int fileId, VirtualFile file) {
    VirtualFile originalFile = file instanceof DeletedVirtualFileStub ?
((DeletedVirtualFileStub)file).getOriginalFile() : file;
    final List<ID<?, ?>> states = IndexingStamp.getNontrivialFileIndexedStates(fileId);

    if (!states.isEmpty()) {
      // Removal must not be interrupted half-way: run it in a non-cancelable section.
      ProgressManager.getInstance().executeNonCancelableSection(() -> removeFileDataFromIndices(states, fileId, originalFile));
    }
  }

  /**
   * Drops both transient and persisted data of {@code inputId} from every index in
   * {@code affectedIndices}. Continues past individual failures and reports the first
   * unexpected error at the end.
   */
  private void removeFileDataFromIndices(@NotNull Collection<? extends ID<?, ?>> affectedIndices, int inputId, VirtualFile file) {
    // document diff can depend on previous value that will be removed
    removeTransientFileDataFromIndices(affectedIndices, inputId, file);

    Throwable unexpectedError = null;
    for (ID<?, ?> indexId : affectedIndices) {
      try {
        updateSingleIndex(indexId, null, inputId, null);
      }
      catch (ProcessCanceledException pce) {
        // Cancellation is unexpected here (the caller wraps us in a non-cancelable section).
        LOG.error(pce);
      }
      catch (Throwable e) {
        LOG.info(e);
        if (unexpectedError == null) {
          unexpectedError = e;
        }
      }
    }
    IndexingStamp.flushCache(inputId);

    if (unexpectedError != null) {
      LOG.error(unexpectedError);
    }
  }

  /**
   * Clears in-memory (transient) index data for {@code inputId} and invalidates any cached
   * per-document stamps/content associated with {@code file}.
   */
  private void removeTransientFileDataFromIndices(Collection<? extends ID<?, ?>> indices, int inputId, VirtualFile file) {
    for (ID<?, ?> indexId : indices) {
      final UpdatableIndex index = myState.getIndex(indexId);
      assert index != null;
      index.removeTransientDataForFile(inputId);
    }

    Document document = myFileDocumentManager.getCachedDocument(file);
    if (document != null) {
      myLastIndexedDocStamps.clearForDocument(document);
      document.putUserData(ourFileContentKey, null);
    }

    if (!myUpToDateIndicesForUnsavedOrTransactedDocuments.isEmpty()) {
      myUpToDateIndicesForUnsavedOrTransactedDocuments.clear();
    }
  }

  /**
   * Periodic flush of all indices to disk. Bails out early if a heavy process is running
   * or if new modifications arrived since {@code modCount} was sampled.
   */
  private void flushAllIndices(final long modCount) {
    if (HeavyProcessLatch.INSTANCE.isRunning()) {
      return;
    }
    IndexingStamp.flushCaches();
    IndexConfiguration state = getState();
    for (ID<?, ?> indexId : new ArrayList<>(state.getIndexIDs())) {
      if (HeavyProcessLatch.INSTANCE.isRunning() || modCount != myLocalModCount.get()) {
        return; // do not interfere with 'main' jobs
      }
      try {
        final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
        if (index !=
null) {
          index.flush();
        }
      }
      catch (Throwable e) {
        // A flush failure indicates storage trouble — schedule a rebuild of that index.
        requestRebuild(indexId, e);
      }
    }

    ContentHashesSupport.flushContentHashes();
    SharedIndicesData.flushData();
  }

  @Override
  @NotNull
  public <K> Collection<K> getAllKeys(@NotNull final ID<K, ?> indexId, @NotNull Project project) {
    Set<K> allKeys = new THashSet<>();
    processAllKeys(indexId, Processors.cancelableCollectProcessor(allKeys), project);
    return allKeys;
  }

  @Override
  public <K> boolean processAllKeys(@NotNull final ID<K, ?> indexId, @NotNull Processor<? super K> processor, @Nullable Project project) {
    // null project means "everything"; otherwise restrict to the project's all-scope.
    return processAllKeys(indexId, processor, project == null ? new EverythingGlobalScope() : GlobalSearchScope.allScope(project), null);
  }

  @Override
  public <K> boolean processAllKeys(@NotNull ID<K, ?> indexId,
                                    @NotNull Processor<? super K> processor,
                                    @NotNull GlobalSearchScope scope,
                                    @Nullable IdFilter idFilter) {
    try {
      waitUntilIndicesAreInitialized();
      final UpdatableIndex<K, ?, FileContent> index = getIndex(indexId);
      if (index == null) {
        return true;
      }
      ensureUpToDate(indexId, scope.getProject(), scope);
      return index.processAllKeys(processor, scope, idFilter);
    }
    catch (StorageException e) {
      scheduleRebuild(indexId, e);
    }
    catch (RuntimeException e) {
      // Storage/IO failures wrapped in runtime exceptions also trigger a rebuild;
      // anything else propagates to the caller.
      final Throwable cause = e.getCause();
      if (cause instanceof StorageException || cause instanceof IOException) {
        scheduleRebuild(indexId, cause);
      }
      else {
        throw e;
      }
    }

    return false;
  }

  @NotNull
  @Override
  public <K, V> Map<K, V> getFileData(@NotNull ID<K, V> id, @NotNull VirtualFile virtualFile, @NotNull Project project) {
    int fileId = getFileId(virtualFile);
    Map<K, V> map = processExceptions(id, virtualFile, GlobalSearchScope.fileScope(project, virtualFile),
                                      index -> index.getIndexedFileData(fileId));
    return ContainerUtil.notNullize(map);
  }

  // Per-thread nesting counter for temporarily disabling the up-to-date check (null == 0 == enabled).
  private static final ThreadLocal<Integer> myUpToDateCheckState = new ThreadLocal<>();

  /**
   * Runs {@code runnable} with the index up-to-date check disabled on the current thread.
   * Nesting is supported via a per-thread counter.
   */
  public static <T,E extends Throwable> T disableUpToDateCheckIn(@NotNull ThrowableComputable<T, E> runnable) throws E {
    disableUpToDateCheckForCurrentThread();
try {
      return runnable.compute();
    }
    finally {
      enableUpToDateCheckForCurrentThread();
    }
  }

  /** Increments the per-thread disable counter (treating an absent value as 0). */
  private static void disableUpToDateCheckForCurrentThread() {
    final Integer currentValue = myUpToDateCheckState.get();
    myUpToDateCheckState.set(currentValue == null ? 1 : currentValue.intValue() + 1);
  }

  /** Decrements the per-thread disable counter, removing the ThreadLocal entry at zero to avoid leaks. */
  private static void enableUpToDateCheckForCurrentThread() {
    final Integer currentValue = myUpToDateCheckState.get();
    if (currentValue != null) {
      final int newValue = currentValue.intValue() - 1;
      if (newValue != 0) {
        myUpToDateCheckState.set(newValue);
      }
      else {
        myUpToDateCheckState.remove();
      }
    }
  }

  /** The check is enabled when no caller on this thread has disabled it. */
  private static boolean isUpToDateCheckEnabled() {
    final Integer value = myUpToDateCheckState.get();
    return value == null || value.intValue() == 0;
  }

  // Prevents recursive ensureUpToDate() calls on the same thread.
  private final ThreadLocal<Boolean> myReentrancyGuard = ThreadLocal.withInitial(() -> Boolean.FALSE);

  /**
   * DO NOT CALL DIRECTLY IN CLIENT CODE.
   * The method is internal to the indexing engine and is called internally; it is public only
   * due to implementation details.
   */
  @Override
  public <K> void ensureUpToDate(@NotNull final ID<K, ?> indexId, @Nullable Project project, @Nullable GlobalSearchScope filter) {
    waitUntilIndicesAreInitialized();
    ensureUpToDate(indexId, project, filter, null);
  }

  protected <K> void ensureUpToDate(@NotNull final ID<K, ?> indexId,
                                    @Nullable Project project,
                                    @Nullable GlobalSearchScope filter,
                                    @Nullable VirtualFile restrictedFile) {
    ProgressManager.checkCanceled();
    myChangedFilesCollector.ensureUpToDate();
    ApplicationManager.getApplication().assertReadAccessAllowed();

    if (!needsFileContentLoading(indexId)) {
      return; //indexed eagerly in foreground while building unindexed file list
    }
    if (filter == GlobalSearchScope.EMPTY_SCOPE) {
      return;
    }
    if (ActionUtil.isDumbMode(project)) {
      handleDumbMode(project);
    }

    NoAccessDuringPsiEvents.checkCallContext();

    if (myReentrancyGuard.get().booleanValue()) {
      //assert false : "ensureUpToDate() is not reentrant!";
      return;
    }
    myReentrancyGuard.set(Boolean.TRUE);

    try {
      if
(isUpToDateCheckEnabled()) {
        try {
          if (!RebuildStatus.isOk(indexId)) {
            throw new ServiceNotReadyException();
          }
          // Apply pending VFS changes, then re-index content of unsaved documents.
          forceUpdate(project, filter, restrictedFile);
          indexUnsavedDocuments(indexId, project, filter, restrictedFile);
        }
        catch (RuntimeException e) {
          // Storage/IO failures mean the index is broken — schedule a rebuild instead of failing the query.
          final Throwable cause = e.getCause();
          if (cause instanceof StorageException || cause instanceof IOException) {
            scheduleRebuild(indexId, e);
          }
          else {
            throw e;
          }
        }
      }
    }
    finally {
      myReentrancyGuard.set(Boolean.FALSE);
    }
  }

  /**
   * Reacts to dumb mode during an index query: waits for smart mode when the current
   * background task opted into waiting, otherwise throws {@code IndexNotReadyException}.
   */
  private static void handleDumbMode(@Nullable Project project) {
    ProgressManager.checkCanceled(); // DumbModeAction.CANCEL

    if (project != null) {
      final ProgressIndicator progressIndicator = ProgressManager.getInstance().getProgressIndicator();
      if (progressIndicator instanceof BackgroundableProcessIndicator) {
        final BackgroundableProcessIndicator indicator = (BackgroundableProcessIndicator)progressIndicator;
        if (indicator.getDumbModeAction() == DumbModeAction.WAIT) {
          assert !ApplicationManager.getApplication().isDispatchThread();
          DumbService.getInstance(project).waitForSmartMode();
          return;
        }
      }
    }

    throw IndexNotReadyException.create(project == null ? null : DumbServiceImpl.getInstance(project).getDumbModeStartTrace());
  }

  @Override
  @NotNull
  public <K, V> List<V> getValues(@NotNull final ID<K, V> indexId, @NotNull K dataKey, @NotNull final GlobalSearchScope filter) {
    VirtualFile restrictToFile = null;

    if (filter instanceof Iterable) {
      // optimisation: in case of one-file-scope we can do better.
      // check if the scope knows how to extract some files off itself
      //noinspection unchecked
      Iterator<VirtualFile> virtualFileIterator = ((Iterable<VirtualFile>)filter).iterator();
      if (virtualFileIterator.hasNext()) {
        VirtualFile restrictToFileCandidate = virtualFileIterator.next();
        if (!virtualFileIterator.hasNext()) {
          // exactly one file in the scope -> query only that file's data
          restrictToFile = restrictToFileCandidate;
        }
      }
    }

    final List<V> values = new SmartList<>();
    // collect every value, ignoring which file it came from
    ValueProcessor<V> processor = (file, value) -> {
      values.add(value);
      return true;
    };
    if (restrictToFile != null) {
      processValuesInOneFile(indexId, dataKey, restrictToFile, processor, filter);
    }
    else {
      processValuesInScope(indexId, dataKey, true, filter, null, processor);
    }
    return values;
  }

  @Override
  @NotNull
  public <K, V> Collection<VirtualFile> getContainingFiles(@NotNull final ID<K, V> indexId,
                                                           @NotNull K dataKey,
                                                           @NotNull final GlobalSearchScope filter) {
    final Set<VirtualFile> files = new THashSet<>();
    processValuesInScope(indexId, dataKey, false, filter, null, (file, value) -> {
      files.add(file);
      return true;
    });
    return files;
  }

  @Override
  public <K, V> boolean processValues(@NotNull final ID<K, V> indexId,
                                      @NotNull final K dataKey,
                                      @Nullable final VirtualFile inFile,
                                      @NotNull ValueProcessor<? super V> processor,
                                      @NotNull final GlobalSearchScope filter) {
    return processValues(indexId, dataKey, inFile, processor, filter, null);
  }

  @Override
  public <K, V> boolean processValues(@NotNull ID<K, V> indexId,
                                      @NotNull K dataKey,
                                      @Nullable VirtualFile inFile,
                                      @NotNull ValueProcessor<? super V> processor,
                                      @NotNull GlobalSearchScope filter,
                                      @Nullable IdFilter idFilter) {
    // a non-null inFile restricts processing to that single file's entries
    return inFile != null
           ? processValuesInOneFile(indexId, dataKey, inFile, processor, filter)
           : processValuesInScope(indexId, dataKey, false, filter, idFilter, processor);
  }

  @Override
  public <K, V> long getIndexModificationStamp(@NotNull ID<K, V> indexId, @NotNull Project project) {
    UpdatableIndex<K, V, FileContent> index = getState().getIndex(indexId);
    ensureUpToDate(indexId, project, GlobalSearchScope.allScope(project));
    return index.getModificationStamp();
  }

  @FunctionalInterface
  public interface IdValueProcessor<V> {
    /**
     * @param fileId the id of the file that the value came from
     * @param value a value to process
     * @return false if no further processing is needed, true otherwise
     */
    boolean process(int fileId, V value);
  }

  /**
   * Process values for a given index key together with their containing file ids. Note that project is supplied
   * only to ensure that all the indices in that project are up to date; there's no guarantee that the processed file ids belong
   * to this project.
   */
  public <K, V> boolean processAllValues(@NotNull ID<K, V> indexId,
                                         @NotNull K key,
                                         @NotNull Project project,
                                         @NotNull IdValueProcessor<? super V> processor) {
    return processValueIterator(indexId, key, null, GlobalSearchScope.allScope(project), valueIt -> {
      while (valueIt.hasNext()) {
        V value = valueIt.next();
        for (ValueContainer.IntIterator inputIdsIterator = valueIt.getInputIdsIterator(); inputIdsIterator.hasNext(); ) {
          if (!processor.process(inputIdsIterator.next(), value)) {
            return false;
          }
          ProgressManager.checkCanceled();
        }
      }
      return true;
    });
  }

  // Runs the given computation against the index under its read lock, converting
  // storage-level failures into an index rebuild request. Returns null when the index
  // is missing or a StorageException (or an exception wrapping one) occurred.
  @Nullable
  private <K, V, R> R processExceptions(@NotNull final ID<K, V> indexId,
                                        @Nullable final VirtualFile restrictToFile,
                                        @NotNull final GlobalSearchScope filter,
                                        @NotNull ThrowableConvertor<? super UpdatableIndex<K, V, FileContent>, ? extends R, ? extends StorageException> computable) {
    try {
      waitUntilIndicesAreInitialized();
      final UpdatableIndex<K, V, FileContent> index = getIndex(indexId);
      if (index == null) {
        return null;
      }
      final Project project = filter.getProject();
      //assert project != null : "GlobalSearchScope#getProject() should be not-null for all index queries";
      ensureUpToDate(indexId, project, filter, restrictToFile);

      return myAccessValidator.validate(indexId, ()->ConcurrencyUtil.withLock(index.getReadLock(), ()->computable.convert(index)));
    }
    catch (StorageException e) {
      scheduleRebuild(indexId, e);
    }
    catch (RuntimeException e) {
      final Throwable cause = getCauseToRebuildIndex(e);
      if (cause != null) {
        scheduleRebuild(indexId, cause);
      }
      else {
        throw e;
      }
    }
    return null;
  }

  private <K, V> boolean processValuesInOneFile(@NotNull ID<K, V> indexId,
                                                @NotNull K dataKey,
                                                @NotNull VirtualFile restrictToFile,
                                                @NotNull ValueProcessor<? super V> processor,
                                                @NotNull GlobalSearchScope scope) {
    // files without an id cannot be present in the index - nothing to process
    if (!(restrictToFile instanceof VirtualFileWithId)) return true;

    int restrictedFileId = getFileId(restrictToFile);
    return processValueIterator(indexId, dataKey, restrictToFile, scope, valueIt -> {
      while (valueIt.hasNext()) {
        V value = valueIt.next();
        // only feed the processor values actually associated with the restricted file
        if (valueIt.getValueAssociationPredicate().contains(restrictedFileId) && !processor.process(restrictToFile, value)) {
          return false;
        }
        ProgressManager.checkCanceled();
      }
      return true;
    });
  }

  private <K, V> boolean processValuesInScope(@NotNull ID<K, V> indexId,
                                              @NotNull K dataKey,
                                              boolean ensureValueProcessedOnce,
                                              @NotNull GlobalSearchScope scope,
                                              @Nullable IdFilter idFilter,
                                              @NotNull ValueProcessor<? super V> processor) {
    PersistentFS fs = (PersistentFS)ManagingFS.getInstance();
    // fall back to the project's indexable-files bitset when no explicit id filter is given
    IdFilter filter = idFilter != null ? idFilter : projectIndexableFiles(scope.getProject());

    return processValueIterator(indexId, dataKey, null, scope, valueIt -> {
      while (valueIt.hasNext()) {
        final V value = valueIt.next();
        for (final ValueContainer.IntIterator inputIdsIterator = valueIt.getInputIdsIterator(); inputIdsIterator.hasNext(); ) {
          final int id = inputIdsIterator.next();
          if (filter != null && !filter.containsFileId(id)) continue;
          VirtualFile file = IndexInfrastructure.findFileByIdIfCached(fs, id);
          if (file != null && scope.accept(file)) {
            if (!processor.process(file, value)) {
              return false;
            }
            if (ensureValueProcessedOnce) {
              ProgressManager.checkCanceled();
              break; // continue with the next value
            }
          }

          ProgressManager.checkCanceled();
        }
      }
      return true;
    });
  }

  private <K, V> boolean processValueIterator(@NotNull ID<K, V> indexId,
                                              @NotNull K dataKey,
                                              @Nullable VirtualFile restrictToFile,
                                              @NotNull GlobalSearchScope scope,
                                              @NotNull Processor<? super InvertedIndexValueIterator<V>> valueProcessor) {
    final Boolean result = processExceptions(indexId, restrictToFile, scope,
                                             index -> valueProcessor.process((InvertedIndexValueIterator<V>)index.getData(dataKey).getValueIterator()));
    // a null result means the index was unavailable - treat as "not stopped by processor"
    return result == null || result.booleanValue();
  }

  @Override
  public <K, V> boolean processFilesContainingAllKeys(@NotNull final ID<K, V> indexId,
                                                      @NotNull final Collection<? extends K> dataKeys,
                                                      @NotNull final GlobalSearchScope filter,
                                                      @Nullable Condition<? super V> valueChecker,
                                                      @NotNull final Processor<? super VirtualFile> processor) {
    ProjectIndexableFilesFilter filesSet = projectIndexableFiles(filter.getProject());
    final TIntHashSet set = collectFileIdsContainingAllKeys(indexId, dataKeys, filter, valueChecker, filesSet);
    return set != null && processVirtualFiles(set, filter, processor);
  }

  // Per-project user-data key caching the ProjectIndexableFilesFilter snapshot.
  private static final Key<SoftReference<ProjectIndexableFilesFilter>> ourProjectFilesSetKey = Key.create("projectFiles");

  void filesUpdateEnumerationFinished() {
  }

  @TestOnly
  public void cleanupForNextTest() {
    myChangedFilesCollector.ensureUpToDate();

    myTransactionMap = SmartFMap.emptyMap();
    IndexConfiguration state = getState();
    for (ID<?, ?> indexId : state.getIndexIDs()) {
      final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
      assert index != null;
      index.cleanupForNextTest();
    }
  }

  @TestOnly
  public IndexedFilesListener getChangedFilesCollector() {
    return myChangedFilesCollector;
  }

  /**
   * Compact membership filter over a set of file ids: stores the ids as a bit mask
   * offset by the minimum id, 64 ids per long (SHIFT/MASK select word and bit).
   */
  public static final class ProjectIndexableFilesFilter extends IdFilter {
    private static final int SHIFT = 6;
    private static final int MASK = (1 << SHIFT) - 1;
    private final long[] myBitMask;
    // snapshot of myFilesModCount at construction; used to detect staleness
    private final int myModificationCount;
    private final int myMinId;
    private final int myMaxId;

    private ProjectIndexableFilesFilter(@NotNull TIntArrayList set, int modificationCount) {
      myModificationCount = modificationCount;
      final int[] minMax = new int[2];
      if (!set.isEmpty()) {
        minMax[0] = minMax[1] = set.get(0);
      }
      set.forEach(value -> {
        minMax[0] = Math.min(minMax[0], value);
        minMax[1] = Math.max(minMax[1], value);
        return true;
      });
      myMaxId = minMax[1];
      myMinId = minMax[0];
      myBitMask = new long[((myMaxId - myMinId) >> SHIFT) + 1];
      set.forEach(value -> {
        value -= myMinId;
        myBitMask[value >> SHIFT] |= (1L << (value & MASK));
        return true;
      });
    }

    @Override
    public boolean containsFileId(int id) {
      if (id < myMinId) return false;
      if (id > myMaxId) return false;
      id -= myMinId;
      return (myBitMask[id >> SHIFT] & (1L << (id & MASK))) != 0;
    }
  }

  void filesUpdateStarted(Project project) {
    myChangedFilesCollector.ensureUpToDate();
    myProjectsBeingUpdated.add(project);
    // bump the mod count so cached ProjectIndexableFilesFilter snapshots are invalidated
    myFilesModCount.incrementAndGet();
  }

  void filesUpdateFinished(@NotNull Project project) {
    myProjectsBeingUpdated.remove(project);
    myFilesModCount.incrementAndGet();
  }

  // Serializes (best-effort) computation of the indexable-files filter across threads.
  private final Lock myCalcIndexableFilesLock = new ReentrantLock();

  /**
   * Returns a cached bitset-backed filter of the project's indexable file ids, or null when
   * filtering is unavailable (default project, an update in progress, or another thread is
   * currently computing the filter). Callers must treat null as "no filtering".
   */
  @Nullable
  public ProjectIndexableFilesFilter projectIndexableFiles(@Nullable Project project) {
    if (project == null || project.isDefault() || myUpdatingFiles.get() > 0) return null;
    if (myProjectsBeingUpdated.contains(project)) return null;

    SoftReference<ProjectIndexableFilesFilter> reference = project.getUserData(ourProjectFilesSetKey);
    ProjectIndexableFilesFilter data = com.intellij.reference.SoftReference.dereference(reference);
    int currentFileModCount = myFilesModCount.get();
    // cached snapshot is still valid for the current mod count
    if (data != null && data.myModificationCount == currentFileModCount) return data;

    if (myCalcIndexableFilesLock.tryLock()) { // make best effort for calculating filter
      try {
        // re-check under the lock: another thread may have just computed it
        reference = project.getUserData(ourProjectFilesSetKey);
        data = com.intellij.reference.SoftReference.dereference(reference);
        if (data != null && data.myModificationCount == currentFileModCount) {
          return data;
        }

        long start = System.currentTimeMillis();

        final TIntArrayList filesSet = new TIntArrayList();
        iterateIndexableFiles(fileOrDir -> {
          ProgressManager.checkCanceled();
          if (fileOrDir instanceof VirtualFileWithId) {
            filesSet.add(((VirtualFileWithId)fileOrDir).getId());
          }
          return true;
        }, project, SilentProgressIndicator.create());
        ProjectIndexableFilesFilter filter = new ProjectIndexableFilesFilter(filesSet, currentFileModCount);
        project.putUserData(ourProjectFilesSetKey, new SoftReference<>(filter));

        long finish = System.currentTimeMillis();
        LOG.debug(filesSet.size() + " files iterated in " + (finish - start) + " ms");

        return filter;
      }
      finally {
        myCalcIndexableFilesLock.unlock();
      }
    }
    return null; // ok, no filtering
  }

  // Intersects the posting lists of all given keys, returning ids of files that contain
  // every key (optionally value-checked and pre-filtered by the project files filter).
  @Nullable
  private <K, V> TIntHashSet collectFileIdsContainingAllKeys(@NotNull final ID<K, V> indexId,
                                                             @NotNull final Collection<? extends K> dataKeys,
                                                             @NotNull final GlobalSearchScope filter,
                                                             @Nullable final Condition<? super V> valueChecker,
                                                             @Nullable final ProjectIndexableFilesFilter projectFilesFilter) {
    ThrowableConvertor<UpdatableIndex<K, V, FileContent>, TIntHashSet, StorageException> convertor =
      index -> InvertedIndexUtil.collectInputIdsContainingAllKeys(index, dataKeys, __ -> {
        ProgressManager.checkCanceled();
        return true;
      }, valueChecker, projectFilesFilter == null ? null : projectFilesFilter::containsFileId);

    return processExceptions(indexId, null, filter, convertor);
  }

  private static boolean processVirtualFiles(@NotNull TIntHashSet ids,
                                             @NotNull final GlobalSearchScope filter,
                                             @NotNull final Processor<? super VirtualFile> processor) {
    final PersistentFS fs = (PersistentFS)ManagingFS.getInstance();
    return ids.forEach(id -> {
      ProgressManager.checkCanceled();
      VirtualFile file = IndexInfrastructure.findFileByIdIfCached(fs, id);
      if (file != null && filter.accept(file)) {
        return processor.process(file);
      }
      return true;
    });
  }

  /**
   * Decides whether the given exception indicates corrupted index data.
   * Returns the underlying cause to rebuild for, or null when no rebuild is warranted.
   */
  @Nullable
  public static Throwable getCauseToRebuildIndex(@NotNull RuntimeException e) {
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      // avoid rebuilding index in tests since we do it synchronously in requestRebuild and we can have readAction at hand
      return null;
    }
    if (e instanceof ProcessCanceledException) return null;
    if (e instanceof IndexOutOfBoundsException) return e; // something wrong with direct byte buffer
    Throwable cause = e.getCause();
    if (cause instanceof StorageException || cause instanceof IOException ||
        cause instanceof IllegalArgumentException) return cause;
    return null;
  }

  @Override
  public <K, V> boolean getFilesWithKey(@NotNull final ID<K, V> indexId,
                                        @NotNull final Set<? extends K> dataKeys,
                                        @NotNull Processor<? super VirtualFile> processor,
                                        @NotNull GlobalSearchScope filter) {
    return processFilesContainingAllKeys(indexId, dataKeys, filter, null, processor);
  }

  @Override
  public <K> void scheduleRebuild(@NotNull final ID<K, ?> indexId, @NotNull final Throwable e) {
    requestRebuild(indexId, e);
  }

  // Queues a full unindexed-files update for every open project.
  private static void scheduleIndexRebuild(String reason) {
    LOG.info("scheduleIndexRebuild, reason: " + reason);
    for (Project project : ProjectManager.getInstance().getOpenProjects()) {
      DumbService.getInstance(project).queueTask(new UnindexedFilesUpdater(project));
    }
  }

  void clearIndicesIfNecessary() {
    waitUntilIndicesAreInitialized();
    for (ID<?, ?> indexId : getState().getIndexIDs()) {
      try {
        RebuildStatus.clearIndexIfNecessary(indexId, getIndex(indexId)::clear);
      }
      catch (StorageException e) {
        requestRebuild(indexId);
        LOG.error(e);
      }
    }
  }

  private void clearIndex(@NotNull final ID<?, ?> indexId) throws StorageException {
    // advance the on-disk version first so a crash mid-clear still forces a rebuild
    advanceIndexVersion(indexId);

    final UpdatableIndex<?, ?, FileContent> index = myState.getIndex(indexId);
    assert index != null : "Index with key " + indexId + " not found or not registered properly";
    index.clear();
  }

  private void advanceIndexVersion(ID<?, ?> indexId) {
    try {
      IndexingStamp.rewriteVersion(indexId, myState.getIndexVersion(indexId));
    }
    catch (IOException e) {
      LOG.error(e);
    }
  }

  @NotNull
  private Set<Document> getUnsavedDocuments() {
    Document[] documents = myFileDocumentManager.getUnsavedDocuments();
    // avoid allocating a hash set for the common 0/1-document cases
    if (documents.length == 0) return Collections.emptySet();
    if (documents.length == 1) return Collections.singleton(documents[0]);
    return new THashSet<>(Arrays.asList(documents));
  }

  @NotNull
  private Set<Document> getTransactedDocuments() {
    return myTransactionMap.keySet();
  }

  // Indexes unsaved (and, for PSI-dependent indices, transacted/uncommitted) documents
  // into the transient in-memory storage of the given index.
  private void indexUnsavedDocuments(@NotNull final ID<?, ?> indexId,
                                     @Nullable Project project,
                                     final GlobalSearchScope filter,
                                     final VirtualFile restrictedFile) {
    if (myUpToDateIndicesForUnsavedOrTransactedDocuments.contains(indexId)) {
      return; // no need to index unsaved docs
      // todo: check scope ?
    }

    Collection<Document> documents = getUnsavedDocuments();
    boolean psiBasedIndex = myPsiDependentIndices.contains(indexId);
    if(psiBasedIndex) {
      // PSI-dependent indices must also see documents with open transactions and
      // uncommitted PSI changes, not just unsaved ones
      Set<Document> transactedDocuments = getTransactedDocuments();
      if (documents.isEmpty()) {
        documents = transactedDocuments;
      }
      else if (!transactedDocuments.isEmpty()) {
        documents = new THashSet<>(documents);
        documents.addAll(transactedDocuments);
      }
      Document[] uncommittedDocuments = project != null ? PsiDocumentManager.getInstance(project).getUncommittedDocuments() : Document.EMPTY_ARRAY;
      if (uncommittedDocuments.length > 0) {
        List<Document> uncommittedDocumentsCollection = Arrays.asList(uncommittedDocuments);
        if (documents.isEmpty()) documents = uncommittedDocumentsCollection;
        else {
          // switch to a mutable set before merging (documents may be a fixed-size list)
          if (!(documents instanceof THashSet)) documents = new THashSet<>(documents);

          documents.addAll(uncommittedDocumentsCollection);
        }
      }
    }

    if (!documents.isEmpty()) {
      Collection<Document> documentsToProcessForProject = ContainerUtil.filter(documents,
                                                                               document -> belongsToScope(myFileDocumentManager.getFile(document), restrictedFile, filter));

      if (!documentsToProcessForProject.isEmpty()) {
        DocumentUpdateTask task = myUnsavedDataUpdateTasks.get(indexId);
        assert task != null : "Task for unsaved data indexing was not initialized for index " + indexId;

        // mark the index up to date only if every document was processed and
        // no PSI transactions are still in flight
        if(runUpdate(true, () -> task.processAll(documentsToProcessForProject, project)) &&
           documentsToProcessForProject.size() == documents.size() &&
           !hasActiveTransactions()
          ) {
          ProgressManager.checkCanceled();
          myUpToDateIndicesForUnsavedOrTransactedDocuments.add(indexId);
        }
      }
    }
  }

  private boolean hasActiveTransactions() {
    return !myTransactionMap.isEmpty();
  }

  // Abstraction over "where does the text to index come from": the raw document,
  // or the (possibly older) committed PSI view of it.
  private interface DocumentContent {
    @NotNull
    CharSequence getText();

    long getModificationStamp();
  }

  // Content taken directly from the document.
  private static class AuthenticContent implements DocumentContent {
    private final Document myDocument;

    private AuthenticContent(final Document document) {
      myDocument = document;
    }

    @NotNull
    @Override
    public CharSequence getText() {
      return myDocument.getImmutableCharSequence();
    }

    @Override
    public long getModificationStamp() {
      return myDocument.getModificationStamp();
    }
  }

  // Content taken from the PSI tree when it lags behind the document (uncommitted changes).
  private static class PsiContent implements DocumentContent {
    private final Document myDocument;
    private final PsiFile myFile;

    private PsiContent(final Document document, final PsiFile file) {
      myDocument = document;
      myFile = file;
    }

    @NotNull
    @Override
    public CharSequence getText() {
      if (myFile.getViewProvider().getModificationStamp() != myDocument.getModificationStamp()) {
        final ASTNode node = myFile.getNode();
        assert node != null;
        return node.getChars();
      }
      return myDocument.getImmutableCharSequence();
    }

    @Override
    public long getModificationStamp() {
      return myFile.getViewProvider().getModificationStamp();
    }
  }

  // Per-document cache of the FileContentImpl built for unsaved-document indexing,
  // reused across indices while the stamp matches.
  private static final Key<WeakReference<FileContentImpl>> ourFileContentKey = Key.create("unsaved.document.index.content");

  // returns false if doc was not indexed because it is already up to date
  // return true if document was indexed
  // caller is responsible to ensure no concurrent same document processing
  private boolean indexUnsavedDocument(@NotNull final Document document, @NotNull final ID<?, ?> requestedIndexId, final Project project,
                                       @NotNull final VirtualFile vFile) {
    final PsiFile dominantContentFile = project == null ? null : findLatestKnownPsiForUncomittedDocument(document, project);

    final DocumentContent content;
    if (dominantContentFile != null && dominantContentFile.getViewProvider().getModificationStamp() != document.getModificationStamp()) {
      // PSI has not caught up with the document - index the committed PSI text
      content = new PsiContent(document, dominantContentFile);
    }
    else {
      content = new AuthenticContent(document);
    }

    boolean psiBasedIndex = myPsiDependentIndices.contains(requestedIndexId);

    final long currentDocStamp = psiBasedIndex ? PsiDocumentManager.getInstance(project).getLastCommittedStamp(document) : content.getModificationStamp();

    final long previousDocStamp = myLastIndexedDocStamps.get(document, requestedIndexId);
    if (previousDocStamp == currentDocStamp) return false;

    final CharSequence contentText = content.getText();
    myFileTypeManager.freezeFileTypeTemporarilyIn(vFile, () -> {
      if (getAffectedIndexCandidates(vFile).contains(requestedIndexId) &&
          getInputFilter(requestedIndexId).acceptInput(vFile)) {
        final int inputId = Math.abs(getFileId(vFile));

        if (!isTooLarge(vFile, contentText.length())) {
          // Reasonably attempt to use same file content when calculating indices as we can evaluate them several at once and store in file content
          WeakReference<FileContentImpl> previousContentRef = document.getUserData(ourFileContentKey);
          FileContentImpl previousContent = com.intellij.reference.SoftReference.dereference(previousContentRef);
          final FileContentImpl newFc;
          if (previousContent != null && previousContent.getStamp() == currentDocStamp) {
            newFc = previousContent;
          }
          else {
            newFc = new FileContentImpl(vFile, contentText, currentDocStamp);
            if (IdIndex.ourSnapshotMappingsEnabled) {
              newFc.putUserData(UpdatableSnapshotInputMappingIndex.FORCE_IGNORE_MAPPING_INDEX_UPDATE, Boolean.TRUE);
            }
            document.putUserData(ourFileContentKey, new WeakReference<>(newFc));
          }

          initFileContent(newFc, project, dominantContentFile);
          newFc.ensureThreadSafeLighterAST();

          if (content instanceof AuthenticContent) {
            newFc.putUserData(PlatformIdTableBuilding.EDITOR_HIGHLIGHTER,
                              EditorHighlighterCache.getEditorHighlighterForCachesBuilding(document));
          }

          try {
            getIndex(requestedIndexId).update(inputId, newFc).compute();
          }
          finally {
            cleanFileContent(newFc, dominantContentFile);
          }
        }
        else { // effectively wipe the data from the indices
          getIndex(requestedIndexId).update(inputId, null).compute();
        }
      }

      // record the stamp we just indexed; the assert guards against concurrent processing
      long previousState = myLastIndexedDocStamps.set(document, requestedIndexId, currentDocStamp);
      assert previousState == previousDocStamp;
    });
    return true;
  }

  // Guards switching between persistent and transient (in-memory) index storage modes.
  private final StorageGuard myStorageLock = new StorageGuard();
  private volatile boolean myPreviousDataBufferingState;
  private final Object myBufferingStateUpdateLock = new Object();

  @ApiStatus.Experimental
  public void runCleanupAction(@NotNull Runnable cleanupAction) {
    Computable<Boolean> updateComputable = () -> {
      cleanupAction.run();
      return true;
    };
    // run the cleanup in both storage modes: persistent and transient
    runUpdate(false, updateComputable);
    runUpdate(true, updateComputable);
  }

  // Executes an index update with storage buffering switched to the requested mode
  // (transient in-memory vs persistent); flips all indices' buffering state lazily,
  // double-checked under myBufferingStateUpdateLock.
  private boolean runUpdate(boolean transientInMemoryIndices, Computable<Boolean> update) {
    StorageGuard.StorageModeExitHandler storageModeExitHandler = myStorageLock.enter(transientInMemoryIndices);

    if (myPreviousDataBufferingState != transientInMemoryIndices) {
      synchronized (myBufferingStateUpdateLock) {
        if (myPreviousDataBufferingState != transientInMemoryIndices) {
          IndexConfiguration state = getState();
          for (ID<?, ?> indexId : state.getIndexIDs()) {
            final UpdatableIndex index = state.getIndex(indexId);
            assert index != null;
            index.setBufferingEnabled(transientInMemoryIndices);
          }
          myPreviousDataBufferingState = transientInMemoryIndices;
        }
      }
    }

    try {
      return update.compute();
    }
    finally {
      storageModeExitHandler.leave();
    }
  }

  private void cleanupMemoryStorage(boolean skipPsiBasedIndices) {
    myLastIndexedDocStamps.clear();
    IndexConfiguration state = myState;
    if (state == null) {
      // avoid waiting for end of indices initialization (IDEA-173382)
      // in memory content will appear on indexing (in read action) and here is event dispatch (write context)
      return;
    }
    for (ID<?, ?> indexId : state.getIndexIDs()) {
      if (skipPsiBasedIndices && myPsiDependentIndices.contains(indexId)) continue;
      final UpdatableIndex<?, ?, FileContent> index = state.getIndex(indexId);
      assert index != null;
      index.cleanupMemoryStorage();
    }
  }

  @Override
  public void requestRebuild(@NotNull final ID<?, ?> indexId, final Throwable throwable) {
    if (!myExtensionsRelatedDataWasLoaded) {
      // extensions are still loading: defer the rebuild to the initialization pool
      IndexInfrastructure.submitGenesisTask(() -> {
        waitUntilIndicesAreInitialized(); // should be always true here since the genesis pool is sequential
        doRequestRebuild(indexId, throwable);
        return null;
      });
    }
    else {
      doRequestRebuild(indexId, throwable);
    }
  }

  private void doRequestRebuild(@NotNull ID<?, ?> indexId, Throwable throwable) {
    cleanupProcessedFlag();
    if (!myExtensionsRelatedDataWasLoaded) reportUnexpectedAsyncInitState();

    if (RebuildStatus.requestRebuild(indexId)) {
      String message = "Rebuild requested for index " + indexId;
      Application app = ApplicationManager.getApplication();
      if (app.isUnitTestMode() && app.isReadAccessAllowed() && !app.isDispatchThread()) {
        // shouldn't happen in tests in general; so fail early with the exception that caused index to be rebuilt.
        // otherwise reindexing will fail anyway later, but with a much more cryptic assertion
        LOG.error(message, throwable);
      }
      else {
        LOG.info(message, throwable);
      }

      cleanupProcessedFlag();

      if (!myInitialized) return;
      advanceIndexVersion(indexId);

      Runnable rebuildRunnable = () -> scheduleIndexRebuild("checkRebuild");

      if (myIsUnitTestMode) {
        rebuildRunnable.run();
      }
      else {
        // we do invoke later since we can have read lock acquired
        TransactionGuard.getInstance().submitTransactionLater(app, rebuildRunnable);
      }
    }
  }

  private static void reportUnexpectedAsyncInitState() {
    LOG.error("Unexpected async indices initialization problem");
  }

  public <K, V> UpdatableIndex<K, V, FileContent> getIndex(ID<K, V> indexId) {
    return getState().getIndex(indexId);
  }

  private InputFilter getInputFilter(@NotNull ID<?, ?> indexId) {
    if (!myInitialized) {
      // 1. early vfs event that needs invalidation
      // 2. pushers that do synchronous indexing for contentless indices
      waitUntilIndicesAreInitialized();
    }
    return getState().getInputFilter(indexId);
  }

  @NotNull
  Collection<VirtualFile> getFilesToUpdate(final Project project) {
    return ContainerUtil.filter(myChangedFilesCollector.getAllFilesToUpdate(), filesToBeIndexedForProjectCondition(project)::test);
  }

  // True for invalid files and files belonging to one of this project's indexable sets.
  @NotNull
  private Predicate<VirtualFile> filesToBeIndexedForProjectCondition(Project project) {
    return virtualFile -> {
      if (!virtualFile.isValid()) {
        return true;
      }

      for (IndexableFileSet set : myIndexableSets) {
        final Project proj = myIndexableSetToProjectMap.get(set);
        if (proj != null && !proj.equals(project)) {
          continue; // skip this set as associated with a different project
        }

        if (ReadAction.compute(() -> set.isInSet(virtualFile))) {
          return true;
        }
      }
      return false;
    };
  }

  public boolean isFileUpToDate(VirtualFile file) {
    return !myChangedFilesCollector.isScheduledForUpdate(file);
  }

  // caller is responsible to ensure no concurrent same document processing
  void processRefreshedFile(@Nullable Project project, @NotNull final com.intellij.ide.caches.FileContent fileContent) {
    // ProcessCanceledException will cause re-adding the file to processing list
    final VirtualFile file = fileContent.getVirtualFile();
    if (myChangedFilesCollector.isScheduledForUpdate(file)) {
      indexFileContent(project, fileContent);
    }
  }

  // Indexes (or wipes, for invalid/too-large files) the given file's content and
  // removes it from the scheduled-for-update set.
  public void indexFileContent(@Nullable Project project, @NotNull com.intellij.ide.caches.FileContent content) {
    VirtualFile file = content.getVirtualFile();
    final int fileId = Math.abs(getIdMaskingNonIdBasedFile(file));

    boolean setIndexedStatus = true;
    try {
      // if file was scheduled for update due to vfs events then it is present in myFilesToUpdate
      // in this case we consider that current indexing (out of roots backed CacheUpdater) will cover its content
      if (file.isValid() && content.getTimeStamp() != file.getTimeStamp()) {
        // stale snapshot - re-read the file
        content = new com.intellij.ide.caches.FileContent(file);
      }

      if (!file.isValid() || isTooLarge(file)) {
        // invalid or oversized file: drop its data from all indices
        removeDataFromIndicesForFile(fileId, file);
        if (file instanceof DeletedVirtualFileStub && ((DeletedVirtualFileStub)file).isResurrected()) {
          // the file came back after deletion - index the resurrected original
          doIndexFileContent(project, new com.intellij.ide.caches.FileContent(((DeletedVirtualFileStub)file).getOriginalFile()));
        }
      }
      else {
        setIndexedStatus = doIndexFileContent(project, content);
      }
    }
    finally {
      IndexingStamp.flushCache(fileId);
    }

    myChangedFilesCollector.removeFileIdFromFilesScheduledForUpdate(fileId);
    if (file instanceof VirtualFileSystemEntry && setIndexedStatus) ((VirtualFileSystemEntry)file).setFileIndexed(true);
  }

  // Runs every applicable index over the file content; returns false when at least one
  // index update could not be applied (so the file must not be marked indexed).
  private boolean doIndexFileContent(@Nullable Project project, @NotNull final com.intellij.ide.caches.FileContent content) {
    final VirtualFile file = content.getVirtualFile();
    Ref<Boolean> setIndexedStatus = Ref.create(Boolean.TRUE);

    // freeze the file type so it cannot change between the candidate lookup and the updates
    myFileTypeManager.freezeFileTypeTemporarilyIn(file, () -> {
      final FileType fileType = file.getFileType();
      final Project finalProject = project == null ? ProjectUtil.guessProjectForFile(file) : project;
      PsiFile psiFile = null;
      FileContentImpl fc = null;
      int inputId = Math.abs(getFileId(file));
      Set<ID<?, ?>> currentIndexedStates = new THashSet<>(IndexingStamp.getNontrivialFileIndexedStates(inputId));

      final List<ID<?, ?>> affectedIndexCandidates = getAffectedIndexCandidates(file);
      //noinspection ForLoopReplaceableByForEach
      for (int i = 0, size = affectedIndexCandidates.size(); i < size; ++i) {
        final ID<?, ?> indexId = affectedIndexCandidates.get(i);
        if (shouldIndexFile(file, indexId)) {
          if (fc == null) {
            // lazily build the shared FileContent on first index that actually needs it
            byte[] currentBytes;
            try {
              currentBytes = content.getBytes();
            }
            catch (IOException e) {
              currentBytes = ArrayUtilRt.EMPTY_BYTE_ARRAY;
            }
            fc = new FileContentImpl(file, currentBytes);

            if (IdIndex.ourSnapshotMappingsEnabled) {
              FileType substituteFileType = SubstitutedFileType.substituteFileType(file, fileType, finalProject);
              byte[] hash = calculateHash(currentBytes, fc.getCharset(), fileType, substituteFileType);
              fc.setHash(hash);
            }

            psiFile = content.getUserData(IndexingDataKeys.PSI_FILE);
            initFileContent(fc, finalProject, psiFile);
          }

          try {
            ProgressManager.checkCanceled();
            if (!updateSingleIndex(indexId, file, inputId, fc)) {
              setIndexedStatus.set(Boolean.FALSE);
            }
            currentIndexedStates.remove(indexId);
          }
          catch (ProcessCanceledException e) {
            cleanFileContent(fc, psiFile);
            throw e;
          }
        }
      }

      if (psiFile != null) {
        psiFile.putUserData(PsiFileImpl.BUILDING_STUB, null);
      }

      // wipe data from indices that previously had entries for this file but no longer apply
      for(ID<?, ?> indexId:currentIndexedStates) {
        if(!getIndex(indexId).isIndexedStateForFile(inputId, file)) {
          ProgressManager.checkCanceled();
          if (!updateSingleIndex(indexId, file, inputId, null)) {
            setIndexedStatus.set(Boolean.FALSE);
          }
        }
      }
    });
    return setIndexedStatus.get();
  }

  // Content hash for snapshot mappings: binary files hash bytes only, text files
  // additionally account for charset and (substituted) file type.
  @NotNull
  public static byte[] calculateHash(@NotNull byte[] currentBytes,
                                     @NotNull Charset charset,
                                     @NotNull FileType fileType,
                                     @NotNull FileType substituteFileType) {
    return fileType.isBinary() ?
           ContentHashesSupport.calcContentHash(currentBytes, substituteFileType) :
           ContentHashesSupport.calcContentHashWithFileType(currentBytes, charset, substituteFileType);
  }

  public boolean isIndexingCandidate(@NotNull VirtualFile file, @NotNull ID<?, ?> indexId) {
    return !isTooLarge(file) && getAffectedIndexCandidates(file).contains(indexId);
  }

  // Indices that may apply to the file, based on its type; project/workspace
  // configuration files are never indexed.
  @NotNull
  private List<ID<?, ?>> getAffectedIndexCandidates(@NotNull VirtualFile file) {
    if (file.isDirectory()) {
      return isProjectOrWorkspaceFile(file, null) ? Collections.emptyList() : myIndicesForDirectories;
    }
    FileType fileType = file.getFileType();
    if(isProjectOrWorkspaceFile(file, fileType)) return Collections.emptyList();

    return getState().getFileTypesForIndex(fileType);
  }

  private static void cleanFileContent(@NotNull FileContentImpl fc, PsiFile psiFile) {
    if (psiFile != null) psiFile.putUserData(PsiFileImpl.BUILDING_STUB, null);
    fc.putUserData(IndexingDataKeys.PSI_FILE, null);
  }

  private static void initFileContent(@NotNull FileContentImpl fc, Project project, PsiFile psiFile) {
    if (psiFile != null) {
      psiFile.putUserData(PsiFileImpl.BUILDING_STUB, true);
      fc.putUserData(IndexingDataKeys.PSI_FILE, psiFile);
    }

    fc.putUserData(IndexingDataKeys.PROJECT, project);
  }

  // Applies one index to the file content (null content wipes the file's data).
  // Returns false when the index is scheduled for rebuild and the update was skipped.
  private boolean updateSingleIndex(@NotNull ID<?, ?> indexId, VirtualFile file, final int inputId, @Nullable FileContent currentFC) {
    if (!myExtensionsRelatedDataWasLoaded) reportUnexpectedAsyncInitState();
    if (!RebuildStatus.isOk(indexId) && !myIsUnitTestMode) {
      return false; // the index is scheduled for rebuild, no need to update
    }
    myLocalModCount.incrementAndGet();

    final UpdatableIndex<?, ?, FileContent> index = getIndex(indexId);
    assert index != null;

    boolean hasContent = currentFC != null;
    if (ourIndexedFile.get() != null) throw new AssertionError("Reentrant indexing");
    ourIndexedFile.set(file);
    boolean updateCalculated = false;
    try {
      // important: no hard referencing currentFC to avoid OOME, the methods introduced for this purpose!
// important: update is called out of try since possible indexer extension is HANDLED as single file fail / restart indexing policy final Computable<Boolean> update = index.update(inputId, currentFC); updateCalculated = true; scheduleUpdate(indexId, update, file, inputId, hasContent); } catch (RuntimeException exception) { Throwable causeToRebuildIndex = getCauseToRebuildIndex(exception); if (causeToRebuildIndex != null && (updateCalculated || causeToRebuildIndex instanceof IOException)) { requestRebuild(indexId, exception); return false; } throw exception; } finally { ourIndexedFile.remove(); } return true; } @Override public VirtualFile getFileBeingCurrentlyIndexed() { return ourIndexedFile.get(); } private class VirtualFileUpdateTask extends UpdateTask<VirtualFile> { @Override void doProcess(VirtualFile item, Project project) { processRefreshedFile(project, new com.intellij.ide.caches.FileContent(item)); } } private final VirtualFileUpdateTask myForceUpdateTask = new VirtualFileUpdateTask(); private final AtomicInteger myForceUpdateRequests = new AtomicInteger(); private void forceUpdate(@Nullable Project project, @Nullable final GlobalSearchScope filter, @Nullable final VirtualFile restrictedTo) { Collection<VirtualFile> allFilesToUpdate = myChangedFilesCollector.getAllFilesToUpdate(); if (!allFilesToUpdate.isEmpty()) { boolean includeFilesFromOtherProjects = restrictedTo == null && (myForceUpdateRequests.incrementAndGet() & 0x3F) == 0; List<VirtualFile> virtualFilesToBeUpdatedForProject = ContainerUtil.filter( allFilesToUpdate, new ProjectFilesCondition(projectIndexableFiles(project), filter, restrictedTo, includeFilesFromOtherProjects) ); if (!virtualFilesToBeUpdatedForProject.isEmpty()) { myForceUpdateTask.processAll(virtualFilesToBeUpdatedForProject, project); } } } private final Lock myReadLock; private final Lock myWriteLock; { ReadWriteLock lock = new ReentrantReadWriteLock(); myReadLock = lock.readLock(); myWriteLock = lock.writeLock(); } private void 
scheduleUpdate(@NotNull final ID<?, ?> indexId, @NotNull Computable<Boolean> update, VirtualFile file, final int inputId, final boolean hasContent) { if (runUpdate(false, update)) { ConcurrencyUtil.withLock(myReadLock, ()->{ UpdatableIndex<?, ?, FileContent> index = getIndex(indexId); if (hasContent) { index.setIndexedStateForFile(inputId, file); } else { index.resetIndexedStateForFile(inputId); } }); } } private boolean needsFileContentLoading(@NotNull ID<?, ?> indexId) { return !myNotRequiringContentIndices.contains(indexId); } @Nullable private IndexableFileSet getIndexableSetForFile(VirtualFile file) { for (IndexableFileSet set : myIndexableSets) { if (set.isInSet(file)) { return set; } } return null; } private void doTransientStateChangeForFile(int fileId, @NotNull VirtualFile file) { waitUntilIndicesAreInitialized(); if (!clearUpToDateStateForPsiIndicesOfUnsavedDocuments(file, IndexingStamp.getNontrivialFileIndexedStates(fileId))) { // change in persistent file clearUpToDateStateForPsiIndicesOfVirtualFile(file); } } private void doInvalidateIndicesForFile(int fileId, @NotNull VirtualFile file, boolean contentChanged) { waitUntilIndicesAreInitialized(); cleanProcessedFlag(file); List<ID<?, ?>> nontrivialFileIndexedStates = IndexingStamp.getNontrivialFileIndexedStates(fileId); Collection<ID<?, ?>> fileIndexedStatesToUpdate = ContainerUtil.intersection(nontrivialFileIndexedStates, myRequiringContentIndices); if (contentChanged) { // only mark the file as outdated, reindex will be done lazily if (!fileIndexedStatesToUpdate.isEmpty()) { //noinspection ForLoopReplaceableByForEach for (int i = 0, size = nontrivialFileIndexedStates.size(); i < size; ++i) { final ID<?, ?> indexId = nontrivialFileIndexedStates.get(i); if (needsFileContentLoading(indexId)) { getIndex(indexId).resetIndexedStateForFile(fileId); } } // transient index value can depend on disk value because former is diff to latter removeTransientFileDataFromIndices(nontrivialFileIndexedStates, fileId, 
file); // the file is for sure not a dir and it was previously indexed by at least one index if (file.isValid()) { if(!isTooLarge(file)) myChangedFilesCollector.scheduleForUpdate(file); else myChangedFilesCollector.scheduleForUpdate(new DeletedVirtualFileStub((VirtualFileWithId)file)); } else { LOG.info("Unexpected state in update:" + file); } } } else { // file was removed for (ID<?, ?> indexId : nontrivialFileIndexedStates) { if (myNotRequiringContentIndices.contains(indexId)) { updateSingleIndex(indexId, null, fileId, null); } } if(!fileIndexedStatesToUpdate.isEmpty()) { // its data should be (lazily) wiped for every index myChangedFilesCollector.scheduleForUpdate(new DeletedVirtualFileStub((VirtualFileWithId)file)); } else { myChangedFilesCollector.removeScheduledFileFromUpdate(file); // no need to update it anymore } } } private void scheduleFileForIndexing(int fileId, @NotNull VirtualFile file, boolean contentChange) { // handle 'content-less' indices separately boolean fileIsDirectory = file.isDirectory(); if (!contentChange) { FileContent fileContent = null; for (ID<?, ?> indexId : fileIsDirectory ? 
myIndicesForDirectories : myNotRequiringContentIndices) { if (getInputFilter(indexId).acceptInput(file)) { if (fileContent == null) { fileContent = new FileContentImpl(file); } updateSingleIndex(indexId, file, fileId, fileContent); } } } // For 'normal indices' schedule the file for update and reset stamps for all affected indices (there // can be client that used indices between before and after events, in such case indices are up to date due to force update // with old content) if (!fileIsDirectory) { if (!file.isValid() || isTooLarge(file)) { // large file might be scheduled for update in before event when its size was not large myChangedFilesCollector.removeScheduledFileFromUpdate(file); } else { myFileTypeManager.freezeFileTypeTemporarilyIn(file, () -> { final List<ID<?, ?>> candidates = getAffectedIndexCandidates(file); boolean scheduleForUpdate = false; //noinspection ForLoopReplaceableByForEach for (int i = 0, size = candidates.size(); i < size; ++i) { final ID<?, ?> indexId = candidates.get(i); if (needsFileContentLoading(indexId) && getInputFilter(indexId).acceptInput(file)) { getIndex(indexId).resetIndexedStateForFile(fileId); scheduleForUpdate = true; } } if (scheduleForUpdate) { IndexingStamp.flushCache(fileId); myChangedFilesCollector.scheduleForUpdate(file); } else if (file instanceof VirtualFileSystemEntry) { ((VirtualFileSystemEntry)file).setFileIndexed(true); } }); } } } private final class ChangedFilesCollector extends IndexedFilesListener { private final IntObjectMap<VirtualFile> myFilesToUpdate = ContainerUtil.createConcurrentIntObjectMap(); private final AtomicInteger myProcessedEventIndex = new AtomicInteger(); private final Phaser myWorkersFinishedSync = new Phaser() { @Override protected boolean onAdvance(int phase, int registeredParties) { return false; } }; private final Executor myVfsEventsExecutor = SequentialTaskExecutor.createSequentialApplicationPoolExecutor("FileBasedIndex Vfs Event Processor"); private final AtomicInteger 
myScheduledVfsEventsWorkers = new AtomicInteger(); ChangedFilesCollector(@NotNull ManagingFS managingFS) { super(managingFS); } @Override protected void buildIndicesForFileRecursively(@NotNull VirtualFile file, boolean contentChange) { cleanProcessedFlag(file); if (!contentChange) { myUpdatingFiles.incrementAndGet(); } super.buildIndicesForFileRecursively(file, contentChange); if (!contentChange) { if (myUpdatingFiles.decrementAndGet() == 0) { myFilesModCount.incrementAndGet(); } } } @Override protected void iterateIndexableFiles(@NotNull VirtualFile file, @NotNull ContentIterator iterator) { for (IndexableFileSet set : myIndexableSets) { if (set.isInSet(file)) { set.iterateIndexableFilesIn(file, iterator); } } } void scheduleForUpdate(VirtualFile file) { if (!(file instanceof DeletedVirtualFileStub)) { IndexableFileSet setForFile = getIndexableSetForFile(file); if (setForFile == null) { return; } } final int fileId = Math.abs(getIdMaskingNonIdBasedFile(file)); final VirtualFile previousVirtualFile = myFilesToUpdate.put(fileId, file); if (previousVirtualFile instanceof DeletedVirtualFileStub && !previousVirtualFile.equals(file)) { assert ((DeletedVirtualFileStub)previousVirtualFile).getOriginalFile().equals(file); ((DeletedVirtualFileStub)previousVirtualFile).setResurrected(true); myFilesToUpdate.put(fileId, previousVirtualFile); } } private void removeScheduledFileFromUpdate(VirtualFile file) { final int fileId = Math.abs(getIdMaskingNonIdBasedFile(file)); final VirtualFile previousVirtualFile = myFilesToUpdate.remove(fileId); if (previousVirtualFile instanceof DeletedVirtualFileStub) { assert ((DeletedVirtualFileStub)previousVirtualFile).getOriginalFile().equals(file); ((DeletedVirtualFileStub)previousVirtualFile).setResurrected(false); myFilesToUpdate.put(fileId, previousVirtualFile); } } private void removeFileIdFromFilesScheduledForUpdate(int fileId) { myFilesToUpdate.remove(fileId); } Collection<VirtualFile> getAllFilesToUpdate() { ensureUpToDate(); if 
(myFilesToUpdate.isEmpty()) { return Collections.emptyList(); } return new ArrayList<>(myFilesToUpdate.values()); } @Override @NotNull public AsyncFileListener.ChangeApplier prepareChange(@NotNull List<? extends VFileEvent> events) { boolean shouldCleanup = ContainerUtil.exists(events, this::memoryStorageCleaningNeeded); ChangeApplier superApplier = super.prepareChange(events); return new AsyncFileListener.ChangeApplier() { @Override public void beforeVfsChange() { if (shouldCleanup) { cleanupMemoryStorage(false); } superApplier.beforeVfsChange(); } @Override public void afterVfsChange() { superApplier.afterVfsChange(); if (myInitialized) ensureUpToDateAsync(); } }; } private boolean memoryStorageCleaningNeeded(VFileEvent event) { Object requestor = event.getRequestor(); return requestor instanceof FileDocumentManager || requestor instanceof PsiManager || requestor == LocalHistory.VFS_EVENT_REQUESTOR; } boolean isScheduledForUpdate(VirtualFile file) { return myFilesToUpdate.containsKey(Math.abs(getIdMaskingNonIdBasedFile(file))); } void ensureUpToDate() { if (!isUpToDateCheckEnabled()) { return; } //assert ApplicationManager.getApplication().isReadAccessAllowed() || ShutDownTracker.isShutdownHookRunning(); waitUntilIndicesAreInitialized(); if (ApplicationManager.getApplication().isReadAccessAllowed()) { processFilesInReadAction(); } else { processFilesInReadActionWithYieldingToWriteAction(); } } void ensureUpToDateAsync() { if (getEventMerger().getApproximateChangesCount() >= 20 && myScheduledVfsEventsWorkers.compareAndSet(0,1)) { myVfsEventsExecutor.execute(this::scheduledEventProcessingInReadActionWithYieldingToWriteAction); if (Registry.is("try.starting.dumb.mode.where.many.files.changed")) { Runnable startDumbMode = () -> { for (Project project : ProjectManager.getInstance().getOpenProjects()) { DumbServiceImpl dumbService = DumbServiceImpl.getInstance(project); DumbModeTask task = FileBasedIndexProjectHandler.createChangedFilesIndexingTask(project); if (task 
!= null) { dumbService.queueTask(task); } } }; Application app = ApplicationManager.getApplication(); if (!app.isHeadlessEnvironment() /*avoid synchronous ensureUpToDate to prevent deadlock*/ && app.isDispatchThread() && !LaterInvocator.isInModalContext()) { startDumbMode.run(); } else { app.invokeLater(startDumbMode, ModalityState.NON_MODAL); } } } } private void processFilesInReadAction() { assert ApplicationManager.getApplication().isReadAccessAllowed(); // no vfs events -> event processing code can finish int publishedEventIndex = getEventMerger().getPublishedEventIndex(); int processedEventIndex = myProcessedEventIndex.get(); if (processedEventIndex == publishedEventIndex) { return; } myWorkersFinishedSync.register(); int phase = myWorkersFinishedSync.getPhase(); try { getEventMerger().processChanges(info -> ConcurrencyUtil.withLock(myWriteLock, () -> { try { ProgressManager.getInstance().executeNonCancelableSection(() -> { int fileId = info.getFileId(); VirtualFile file = info.getFile(); if (info.isTransientStateChanged()) doTransientStateChangeForFile(fileId, file); if (info.isBeforeContentChanged()) FileBasedIndexImpl.this.doInvalidateIndicesForFile(fileId, file, true); if (info.isContentChanged()) scheduleFileForIndexing(fileId, file, true); if (info.isFileRemoved()) FileBasedIndexImpl.this.doInvalidateIndicesForFile(fileId, file, false); if (info.isFileAdded()) scheduleFileForIndexing(fileId, file, false); }); } finally { IndexingStamp.flushCache(info.getFileId()); } return true; }) ); } finally { myWorkersFinishedSync.arriveAndDeregister(); } myWorkersFinishedSync.awaitAdvance(phase); if (getEventMerger().getPublishedEventIndex() == publishedEventIndex) { myProcessedEventIndex.compareAndSet(processedEventIndex, publishedEventIndex); } } private void processFilesInReadActionWithYieldingToWriteAction() { while (getEventMerger().hasChanges()) { if (!ProgressIndicatorUtils.runInReadActionWithWriteActionPriority(this::processFilesInReadAction)) { 
ProgressIndicatorUtils.yieldToPendingWriteActions(); } } } private void scheduledEventProcessingInReadActionWithYieldingToWriteAction() { try { processFilesInReadActionWithYieldingToWriteAction(); } finally { myScheduledVfsEventsWorkers.decrementAndGet(); } } } private boolean clearUpToDateStateForPsiIndicesOfUnsavedDocuments(@NotNull VirtualFile file, Collection<? extends ID<?, ?>> affectedIndices) { if (!myUpToDateIndicesForUnsavedOrTransactedDocuments.isEmpty()) { myUpToDateIndicesForUnsavedOrTransactedDocuments.clear(); } Document document = myFileDocumentManager.getCachedDocument(file); if (document != null && myFileDocumentManager.isDocumentUnsaved(document)) { // will be reindexed in indexUnsavedDocuments myLastIndexedDocStamps.clearForDocument(document); // Q: non psi indices document.putUserData(ourFileContentKey, null); return true; } removeTransientFileDataFromIndices(ContainerUtil.intersection(affectedIndices, myPsiDependentIndices), getFileId(file), file); return false; } static int getIdMaskingNonIdBasedFile(@NotNull VirtualFile file) { return file instanceof VirtualFileWithId ?((VirtualFileWithId)file).getId() : IndexingStamp.INVALID_FILE_ID; } private class UnindexedFilesFinder implements CollectingContentIterator { private final List<VirtualFile> myFiles = new ArrayList<>(); private final boolean myDoTraceForFilesToBeIndexed = LOG.isTraceEnabled(); @NotNull @Override public List<VirtualFile> getFiles() { List<VirtualFile> files; synchronized (myFiles) { files = myFiles; } // When processing roots concurrently myFiles looses the local order of local vs archive files // If we process the roots in 2 threads we can just separate local vs archive // IMPORTANT: also remove duplicated file that can appear due to roots intersection BitSet usedFileIds = new BitSet(files.size()); List<VirtualFile> localFileSystemFiles = new ArrayList<>(files.size() / 2); List<VirtualFile> archiveFiles = new ArrayList<>(files.size() / 2); for(VirtualFile file:files) { int 
fileId = ((VirtualFileWithId)file).getId(); if (usedFileIds.get(fileId)) continue; usedFileIds.set(fileId); if (file.getFileSystem() instanceof LocalFileSystem) localFileSystemFiles.add(file); else archiveFiles.add(file); } localFileSystemFiles.addAll(archiveFiles); return localFileSystemFiles; } @Override public boolean processFile(@NotNull final VirtualFile file) { return ReadAction.compute(() -> { if (!file.isValid()) { return true; } if (file instanceof VirtualFileSystemEntry && ((VirtualFileSystemEntry)file).isFileIndexed()) { return true; } if (!(file instanceof VirtualFileWithId)) { return true; } myFileTypeManager.freezeFileTypeTemporarilyIn(file, () -> { boolean isUptoDate = true; if (file.isDirectory() || !isTooLarge(file)) { final List<ID<?, ?>> affectedIndexCandidates = getAffectedIndexCandidates(file); //noinspection ForLoopReplaceableByForEach for (int i = 0, size = affectedIndexCandidates.size(); i < size; ++i) { final ID<?, ?> indexId = affectedIndexCandidates.get(i); try { if (needsFileContentLoading(indexId) && shouldIndexFile(file, indexId)) { if (myDoTraceForFilesToBeIndexed) { LOG.trace("Scheduling indexing of " + file + " by request of index " + indexId); } synchronized (myFiles) { myFiles.add(file); } isUptoDate = false; break; } } catch (RuntimeException e) { final Throwable cause = e.getCause(); if (cause instanceof IOException || cause instanceof StorageException) { LOG.info(e); requestRebuild(indexId); } else { throw e; } } } } FileContent fileContent = null; int inputId = Math.abs(getIdMaskingNonIdBasedFile(file)); for (ID<?, ?> indexId : myNotRequiringContentIndices) { if (shouldIndexFile(file, indexId)) { if (fileContent == null) { fileContent = new FileContentImpl(file); } updateSingleIndex(indexId, file, inputId, fileContent); } } IndexingStamp.flushCache(inputId); if (isUptoDate && file instanceof VirtualFileSystemEntry) { ((VirtualFileSystemEntry)file).setFileIndexed(true); } }); ProgressManager.checkCanceled(); return true; }); } 
} private boolean shouldIndexFile(@NotNull VirtualFile file, @NotNull ID<?, ?> indexId) { return getInputFilter(indexId).acceptInput(file) && (isMock(file) || !getIndex(indexId).isIndexedStateForFile(((NewVirtualFile)file).getId(), file)); } static boolean isMock(final VirtualFile file) { return !(file instanceof NewVirtualFile); } private boolean isTooLarge(@NotNull VirtualFile file) { if (SingleRootFileViewProvider.isTooLargeForIntelligence(file)) { return !myNoLimitCheckTypes.contains(file.getFileType()) || SingleRootFileViewProvider.isTooLargeForContentLoading(file); } return false; } private boolean isTooLarge(@NotNull VirtualFile file, long contentSize) { if (SingleRootFileViewProvider.isTooLargeForIntelligence(file, contentSize)) { return !myNoLimitCheckTypes.contains(file.getFileType()) || SingleRootFileViewProvider.isTooLargeForContentLoading(file, contentSize); } return false; } @NotNull CollectingContentIterator createContentIterator(@Nullable ProgressIndicator indicator) { return new UnindexedFilesFinder(); } @Override public void registerIndexableSet(@NotNull IndexableFileSet set, @Nullable Project project) { myIndexableSets.add(set); myIndexableSetToProjectMap.put(set, project); if (project != null) { ((PsiManagerImpl)PsiManager.getInstance(project)).addTreeChangePreprocessor(event -> { if (event.isGenericChange() && event.getCode() == PsiTreeChangeEventImpl.PsiEventType.CHILDREN_CHANGED) { PsiFile file = event.getFile(); if (file != null) { VirtualFile virtualFile = file.getVirtualFile(); if (virtualFile instanceof VirtualFileWithId) { myChangedFilesCollector.getEventMerger().recordTransientStateChangeEvent(virtualFile); } } } }); } } private void clearUpToDateStateForPsiIndicesOfVirtualFile(VirtualFile virtualFile) { if (virtualFile instanceof VirtualFileWithId) { int fileId = ((VirtualFileWithId)virtualFile).getId(); boolean wasIndexed = false; List<ID<?, ?>> candidates = getAffectedIndexCandidates(virtualFile); for (ID<?, ?> candidate : 
candidates) { if (myPsiDependentIndices.contains(candidate)) { if(getInputFilter(candidate).acceptInput(virtualFile)) { getIndex(candidate).resetIndexedStateForFile(fileId); wasIndexed = true; } } } if (wasIndexed) { myChangedFilesCollector.scheduleForUpdate(virtualFile); IndexingStamp.flushCache(fileId); } } } @Override public void removeIndexableSet(@NotNull IndexableFileSet set) { if (!myIndexableSetToProjectMap.containsKey(set)) return; myIndexableSets.remove(set); myIndexableSetToProjectMap.remove(set); for (VirtualFile file : myChangedFilesCollector.getAllFilesToUpdate()) { final int fileId = Math.abs(getIdMaskingNonIdBasedFile(file)); if (!file.isValid()) { removeDataFromIndicesForFile(fileId, file); myChangedFilesCollector.removeFileIdFromFilesScheduledForUpdate(fileId); } else if (getIndexableSetForFile(file) == null) { // todo remove data from indices for removed myChangedFilesCollector.removeFileIdFromFilesScheduledForUpdate(fileId); } } IndexingStamp.flushCaches(); } @Override public VirtualFile findFileById(Project project, int id) { return IndexInfrastructure.findFileById((PersistentFS)ManagingFS.getInstance(), id); } @Nullable private static PsiFile findLatestKnownPsiForUncomittedDocument(@NotNull Document doc, @NotNull Project project) { return PsiDocumentManager.getInstance(project).getCachedPsiFile(doc); } private static void cleanupProcessedFlag() { final VirtualFile[] roots = ManagingFS.getInstance().getRoots(); for (VirtualFile root : roots) { cleanProcessedFlag(root); } } private static void cleanProcessedFlag(@NotNull final VirtualFile file) { if (!(file instanceof VirtualFileSystemEntry)) return; final VirtualFileSystemEntry nvf = (VirtualFileSystemEntry)file; if (file.isDirectory()) { nvf.setFileIndexed(false); for (VirtualFile child : nvf.getCachedChildren()) { cleanProcessedFlag(child); } } else { nvf.setFileIndexed(false); } } @Override public void iterateIndexableFilesConcurrently(@NotNull ContentIterator processor, @NotNull Project 
project, @NotNull ProgressIndicator indicator) { PushedFilePropertiesUpdaterImpl.invokeConcurrentlyIfPossible(collectScanRootRunnables(processor, project, indicator)); } @Override public void iterateIndexableFiles(@NotNull final ContentIterator processor, @NotNull final Project project, final ProgressIndicator indicator) { for(Runnable r: collectScanRootRunnables(processor, project, indicator)) r.run(); } @NotNull private static List<Runnable> collectScanRootRunnables(@NotNull final ContentIterator processor, @NotNull final Project project, final ProgressIndicator indicator) { FileBasedIndexScanRunnableCollector collector = FileBasedIndexScanRunnableCollector.getInstance(project); return collector.collectScanRootRunnables(processor, indicator); } private final class DocumentUpdateTask extends UpdateTask<Document> { private final ID<?, ?> myIndexId; DocumentUpdateTask(ID<?, ?> indexId) { myIndexId = indexId; } @Override void doProcess(Document document, Project project) { indexUnsavedDocument(document, myIndexId, project, myFileDocumentManager.getFile(document)); } } private class FileIndexDataInitialization extends IndexInfrastructure.DataInitialization<IndexConfiguration> { private final IndexConfiguration state = new IndexConfiguration(); private final Set<ID> versionChangedIndexes = ContainerUtil.newConcurrentSet(); private boolean currentVersionCorrupted; private SerializationManagerEx mySerializationManagerEx; private void initAssociatedDataForExtensions() { long started = System.nanoTime(); Iterator<FileBasedIndexExtension> extensions = IndexInfrastructure.hasIndices() ? ((ExtensionPointImpl<FileBasedIndexExtension>)FileBasedIndexExtension.EXTENSION_POINT_NAME.getPoint(null)).iterator() : Collections.emptyIterator(); // todo: init contentless indices first ? 
while (extensions.hasNext()) { FileBasedIndexExtension<?, ?> extension = extensions.next(); if (extension == null) break; ID<?, ?> name = extension.getName(); RebuildStatus.registerIndex(name); myUnsavedDataUpdateTasks.put(name, new DocumentUpdateTask(name)); if (!extension.dependsOnFileContent()) { if (extension.indexDirectories()) myIndicesForDirectories.add(name); myNotRequiringContentIndices.add(name); } else { myRequiringContentIndices.add(name); } if (isPsiDependentIndex(extension)) myPsiDependentIndices.add(name); myNoLimitCheckTypes.addAll(extension.getFileTypesWithSizeLimitNotApplicable()); addNestedInitializationTask(() -> { try { if (registerIndexer(extension, state)) { versionChangedIndexes.add(extension.getName()); } } catch (IOException io) { throw io; } catch (Throwable t) { PluginManager.handleComponentError(t, extension.getClass().getName(), null); } }); } myExtensionsRelatedDataWasLoaded = true; LOG.info("File index extensions iterated:" + (System.nanoTime() - started) / 1000000); } @Override protected void prepare() { initAssociatedDataForExtensions(); mySerializationManagerEx = SerializationManagerEx.getInstanceEx(); File indexRoot = PathManager.getIndexRoot(); PersistentIndicesConfiguration.loadConfiguration(); final File corruptionMarker = new File(indexRoot, CORRUPTION_MARKER_NAME); currentVersionCorrupted = IndexInfrastructure.hasIndices() && corruptionMarker.exists(); if (currentVersionCorrupted) { FileUtil.deleteWithRenaming(indexRoot); indexRoot.mkdirs(); // serialization manager is initialized before and use removed index root so we need to reinitialize it mySerializationManagerEx.reinitializeNameStorage(); ID.reinitializeDiskStorage(); PersistentIndicesConfiguration.saveConfiguration(); FileUtil.delete(corruptionMarker); } } @Override protected void onThrowable(@NotNull Throwable t) { LOG.error(t); } @Override protected IndexConfiguration finish() { try { state.finalizeFileTypeMappingForIndices(); String rebuildNotification = null; if 
(currentVersionCorrupted) { rebuildNotification = "Index files on disk are corrupted. Indices will be rebuilt."; } else if (!versionChangedIndexes.isEmpty()) { String changedIndexesText = versionChangedIndexes.stream().map(id -> id.getName()).collect(Collectors.joining(", ")); rebuildNotification = "Index file format has changed for " + changedIndexesText + " indices. These indices will be rebuilt."; } if (rebuildNotification != null && !ApplicationManager.getApplication().isHeadlessEnvironment() && Registry.is("ide.showIndexRebuildMessage")) { NOTIFICATIONS.createNotification("Index Rebuild", rebuildNotification, NotificationType.INFORMATION, null).notify(null); } state.freeze(); myState = state; // memory barrier // check if rebuild was requested for any index during registration for (ID<?, ?> indexId : state.getIndexIDs()) { try { RebuildStatus.clearIndexIfNecessary(indexId, () -> clearIndex(indexId)); } catch (StorageException e) { requestRebuild(indexId); LOG.error(e); } } registerIndexableSet(new AdditionalIndexableFileSet(), null); return state; } finally { ShutDownTracker.getInstance().registerShutdownTask(FileBasedIndexImpl.this::performShutdown); saveRegisteredIndicesAndDropUnregisteredOnes(state.getIndexIDs()); myFlushingFuture = FlushingDaemon.everyFiveSeconds(new Runnable() { private int lastModCount; @Override public void run() { mySerializationManagerEx.flushNameStorage(); int currentModCount = myLocalModCount.get(); if (lastModCount == currentModCount) { flushAllIndices(lastModCount); } lastModCount = currentModCount; } }); myAllIndicesInitializedFuture = IndexInfrastructure.submitGenesisTask(() -> { if (!myShutdownPerformed.get()) { myChangedFilesCollector.ensureUpToDateAsync(); } return null; }); myInitialized = true; // this will ensure that all changes to component's state will be visible to other threads } } } @Override public void invalidateCaches() { File indexRoot = PathManager.getIndexRoot(); LOG.info("Requesting explicit indices 
invalidation", new Throwable()); try { final File corruptionMarker = new File(indexRoot, CORRUPTION_MARKER_NAME); //noinspection IOResourceOpenedButNotSafelyClosed new FileOutputStream(corruptionMarker).close(); } catch (Throwable ignore) { } } @TestOnly public void waitForVfsEventsExecuted(long timeout, @NotNull TimeUnit unit) throws Exception { ApplicationManager.getApplication().assertIsDispatchThread(); long deadline = System.nanoTime() + unit.toNanos(timeout); while (System.nanoTime() < deadline) { try { ((BoundedTaskExecutor)myChangedFilesCollector.myVfsEventsExecutor).waitAllTasksExecuted(100, TimeUnit.MILLISECONDS); return; } catch (TimeoutException e) { UIUtil.dispatchAllInvocationEvents(); } } } public synchronized FileContentHashIndex getFileContentHashIndex(@NotNull File enumeratorPath) { UpdatableIndex<Integer, Void, FileContent> index = getState().getIndex(FileContentHashIndexExtension.HASH_INDEX_ID); if (index == null) { try { registerIndexer(FileContentHashIndexExtension.create(enumeratorPath, this), myState); } catch (IOException e) { throw new RuntimeException(e); } } else return (FileContentHashIndex)index; return (FileContentHashIndex)getState().getIndex(FileContentHashIndexExtension.HASH_INDEX_ID); } private static final boolean INDICES_ARE_PSI_DEPENDENT_BY_DEFAULT = SystemProperties.getBooleanProperty("idea.indices.psi.dependent.default", true); static boolean isPsiDependentIndex(@NotNull IndexExtension<?, ?, ?> extension) { if (INDICES_ARE_PSI_DEPENDENT_BY_DEFAULT) { return extension instanceof FileBasedIndexExtension && ((FileBasedIndexExtension<?, ?>)extension).dependsOnFileContent() && !(extension instanceof DocumentChangeDependentIndex); } else { return extension instanceof PsiDependentIndex; } } }
don't traverse all indices for directories in changed file collector GitOrigin-RevId: 2a0cfe07c23036ef1c63c75365f3d99c32a66757
platform/lang-impl/src/com/intellij/util/indexing/FileBasedIndexImpl.java
don't traverse all indices for directories in changed file collector
Java
apache-2.0
28df07f31460148048c8100165d7a89ce7696fa4
0
skedgo/android-maps-utils,ajju4455/android-maps-utils,jiu9x9uij/GoogleMapUtilsDemo,finkcloud/android-maps-utils,build3r/android-maps-utils,Akylas/android-maps-utils,rxl194/android-maps-utils,h2ri/android-maps-utils,xsincrueldadx/android-maps-utils,noberasco/android-maps-utils,hermance/android-maps-utils,ardock/android-maps-utils,zamesilyasa/android-maps-utils,sujaybhowmick/android-maps-utils,mansigoel/android-maps-utils,Epidilius/android-maps-utils,panzerfahrer/android-maps-utils,Raizlabs/android-maps-utils,googlemaps/android-maps-utils,canlasd/Map-Plotting,johnjohndoe/android-maps-utils,googlemaps/android-maps-utils,tomquist/android-maps-utils,barbeau/android-maps-utils,vishal1011/android-maps-utils,markmcd/android-maps-utils,SamoshkinR-Tem/android-maps-utils,budmul354/android-maps-utils,chodanunsrinil/map,impraveen/android-maps-utils,didldum/android-maps-utils,stephenmcd/android-maps-utils,sufeiiz/android-maps-utils,ifrhector/android-maps-utils,niray/android-maps-utils,googlemaps/android-maps-utils,yura0202/android-maps-utils,wesleym/android-maps-utils,rovkinmax/Anrroid-Map-Utils,JonReppDoneD/Google-Maps-Android-API,Munazza/android-maps-utils,erpragatisingh/android-maps-utils,commonsguy/android-maps-utils
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.maps.android.quadtree; import com.google.maps.android.geometry.Bounds; import com.google.maps.android.geometry.Point; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * A quad tree which tracks items with a Point geometry. * See http://en.wikipedia.org/wiki/Quadtree for details on the data structure. * This class is not thread safe. */ @Deprecated // Experimental. public class PointQuadTree<T extends PointQuadTree.Item> { public interface Item { Point getPoint(); } /** * The bounds of this quad. */ private final Bounds mBounds; /** * The depth of this quad in the tree. */ private final int mDepth; /** * Maximum number of elements to store in a quad before splitting. */ private final static int MAX_ELEMENTS = 10; /** * The elements inside this quad, if any. */ private List<T> mItems; /** * Maximum depth. */ private final static int MAX_DEPTH = 30; /** * Child quads. */ private PointQuadTree[] mChildren = null; /** * Creates a new quad tree with specified bounds. 
* @param minX * @param maxX * @param minY * @param maxY */ public PointQuadTree(double minX, double maxX, double minY, double maxY) { this(new Bounds(minX, maxX, minY, maxY)); } public PointQuadTree(Bounds bounds) { this(bounds, 0); } private PointQuadTree(double minX, double minY, double maxX, double maxY, int depth) { this(new Bounds(minX, maxX, minY, maxY), depth); } private PointQuadTree(Bounds bounds, int depth) { mBounds = bounds; mDepth = depth; } /** * Insert an item. */ public void add(T item) { Point point = item.getPoint(); insert(point.x, point.y, item); } private boolean insert(double x, double y, T item) { if (!this.mBounds.contains(x, y)) { return false; } if (this.mChildren != null) { for (PointQuadTree<T> quad : mChildren) { if (quad.insert(x, y, item)) { return true; } } return false; // should not happen } if (mItems == null) { mItems = new ArrayList<T>(); } mItems.add(item); if (mItems.size() > MAX_ELEMENTS && mDepth < MAX_DEPTH) { split(); } return true; } /** * Split this quad. */ private void split() { mChildren = new PointQuadTree[]{ new PointQuadTree(mBounds.minX, mBounds.minY, mBounds.midX, mBounds.midY, mDepth + 1), new PointQuadTree(mBounds.midX, mBounds.minY, mBounds.maxX, mBounds.midY, mDepth + 1), new PointQuadTree(mBounds.minX, mBounds.midY, mBounds.midX, mBounds.maxY, mDepth + 1), new PointQuadTree(mBounds.midX, mBounds.midY, mBounds.maxX, mBounds.maxY, mDepth + 1) }; List<T> items = mItems; mItems = null; for (T item : items) { // re-insert items into child quads. add(item); } } /** * Remove the given item from the set. * @return whether the item was removed. 
*/ public boolean remove(T item) { Point point = item.getPoint(); return remove(point.x, point.y, item); } private boolean remove(double x, double y, T item) { if (!this.mBounds.contains(x, y)) { return false; } if (mChildren != null) { for (PointQuadTree<T> quad : mChildren) { if (quad.remove(x, y, item)) { return true; } } return false; } else { return mItems.remove(item); } } public void clear() { mChildren = null; if (mItems != null) { mItems.clear(); } } /** * Search for all items within a given bounds. */ public Set<T> search(Bounds searchBounds) { final HashSet<T> results = new HashSet<T>(); search(searchBounds, results); return results; } private void search(Bounds searchBounds, Set<T> results) { if (!mBounds.intersects(searchBounds)) { return; } if (this.mChildren != null) { for (PointQuadTree<T> quad : mChildren) { quad.search(searchBounds, results); } } else if (mItems != null && !mItems.isEmpty()) { for (T item : mItems) { if (searchBounds.contains(item.getPoint())) { results.add(item); } } } } }
library/src/com/google/maps/android/quadtree/PointQuadTree.java
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.maps.android.quadtree; import com.google.maps.android.geometry.Bounds; import com.google.maps.android.geometry.Point; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * A quad tree which tracks items with a Point geometry. * See http://en.wikipedia.org/wiki/Quadtree for details on the data structure. * This class is not thread safe. */ @Deprecated // Experimental. public class PointQuadTree<T extends PointQuadTree.Item> { public static interface Item { Point getPoint(); } /** * The bounds of this quad. */ private final Bounds mBounds; /** * The depth of this quad in the tree. */ private final int mDepth; /** * Maximum number of elements to store in a quad before splitting. */ private final static int MAX_ELEMENTS = 10; /** * The elements inside this quad, if any. */ private List<T> mItems; /** * Maximum depth. */ private final static int MAX_DEPTH = 30; /** * Child quads. */ private PointQuadTree[] mChildren = null; /** * Creates a new quad tree with specified bounds. 
* @param minX * @param maxX * @param minY * @param maxY */ public PointQuadTree(double minX, double maxX, double minY, double maxY) { this(new Bounds(minX, maxX, minY, maxY)); } public PointQuadTree(Bounds bounds) { this(bounds, 0); } private PointQuadTree(double minX, double minY, double maxX, double maxY, int depth) { this(new Bounds(minX, maxX, minY, maxY), depth); } private PointQuadTree(Bounds bounds, int depth) { mBounds = bounds; mDepth = depth; } /** * Insert an item. */ public void add(T item) { Point point = item.getPoint(); insert(point.x, point.y, item); } private boolean insert(double x, double y, T item) { if (!this.mBounds.contains(x, y)) { return false; } if (this.mChildren != null) { for (PointQuadTree<T> quad : mChildren) { if (quad.insert(x, y, item)) { return true; } } return false; // should not happen } if (mItems == null) { mItems = new ArrayList<T>(); } mItems.add(item); if (mItems.size() > MAX_ELEMENTS && mDepth < MAX_DEPTH) { split(); } return true; } /** * Split this quad. */ private void split() { mChildren = new PointQuadTree[]{ new PointQuadTree(mBounds.minX, mBounds.minY, mBounds.midX, mBounds.midY, mDepth + 1), new PointQuadTree(mBounds.midX, mBounds.minY, mBounds.maxX, mBounds.midY, mDepth + 1), new PointQuadTree(mBounds.minX, mBounds.midY, mBounds.midX, mBounds.maxY, mDepth + 1), new PointQuadTree(mBounds.midX, mBounds.midY, mBounds.maxX, mBounds.maxY, mDepth + 1) }; List<T> items = mItems; mItems = null; for (T item : items) { // re-insert items into child quads. add(item); } } /** * Remove the given item from the set. * @return whether the item was removed. 
*/ public boolean remove(T item) { Point point = item.getPoint(); return remove(point.x, point.y, item); } private boolean remove(double x, double y, T item) { if (!this.mBounds.contains(x, y)) { return false; } if (mChildren != null) { for (PointQuadTree<T> quad : mChildren) { if (quad.remove(x, y, item)) { return true; } } return false; } else { return mItems.remove(item); } } public void clear() { mChildren = null; if (mItems != null) { mItems.clear(); } } /** * Search for all items within a given bounds. */ public Set<T> search(Bounds searchBounds) { final HashSet<T> results = new HashSet<T>(); search(searchBounds, results); return results; } private void search(Bounds searchBounds, Set<T> results) { if (!mBounds.intersects(searchBounds)) { return; } if (this.mChildren != null) { for (PointQuadTree<T> quad : mChildren) { quad.search(searchBounds, results); } } else if (mItems != null && !mItems.isEmpty()) { for (T item : mItems) { if (searchBounds.contains(item.getPoint())) { results.add(item); } } } } }
Remove unneeded "static" qualifier in QuadTree.Item interface (thanks Cyril)
library/src/com/google/maps/android/quadtree/PointQuadTree.java
Remove unneeded "static" qualifier in QuadTree.Item interface (thanks Cyril)
Java
apache-2.0
f03c4503a315107476e2199c715e68d601bc895d
0
spk83/risk
package org.risk.client; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import com.google.common.collect.ImmutableMap; public final class GameApi { public static final String ALL = "ALL"; public static final String PLAYER_ID = "playerId"; public static final String PLAYER_NAME = "playerName"; public static final String PLAYER_TOKENS = "playerTokens"; public static final String PLAYER_PROFILE_PIC_URL = "playerProfilePicUrl"; /** playerId for the Artificial Intelligence (AI) player. */ public static final int AI_PLAYER_ID = 0; /** playerId for a user viewing a match; a viewer can't make any moves in the game. */ public static final int VIEWER_ID = -1; public interface Container { void sendGameReady(); void sendMakeMove(List<Operation> operations); } public static class VerifyMove extends Message { protected final List<Map<String, Object>> playersInfo; protected final Map<String, Object> state; protected final Map<String, Object> lastState; /** * You should verify this lastMove is legal given lastState; some imperfect information * games will need to also examine state to determine if the lastMove was legal. */ protected final List<Operation> lastMove; /** * lastMovePlayerId can either be the ID of a player in playersInfo, * or 0 for the Artificial Intelligence (AI) player. */ protected final int lastMovePlayerId; /** * The number of tokens each player currently has in the pot (see {@link AttemptChangeTokens}); * The sum of values is always non-negative (i.e., the total pot can NOT be negative). * If the game ends when the total pot is non-zero, * the pot is given to the player with the highest score (see {@link EndGame}), * or if all players have the same score then the pot is distributed evenly. 
*/ protected final Map<Integer, Integer> playerIdToNumberOfTokensInPot; public VerifyMove(List<Map<String, Object>> playersInfo, Map<String, Object> state, Map<String, Object> lastState, List<Operation> lastMove, int lastMovePlayerId, Map<Integer, Integer> playerIdToNumberOfTokensInPot) { this.playersInfo = checkHasJsonSupportedType(playersInfo); this.state = checkHasJsonSupportedType(state); this.lastState = checkHasJsonSupportedType(lastState); this.lastMove = lastMove; this.lastMovePlayerId = checkHasJsonSupportedType(lastMovePlayerId); this.playerIdToNumberOfTokensInPot = playerIdToNumberOfTokensInPot; } @Override public String getMessageName() { return "VerifyMove"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList( "playersInfo", playersInfo, "state", state, "lastState", lastState, "lastMove", lastMove, "lastMovePlayerId", lastMovePlayerId, "playerIdToNumberOfTokensInPot", playerIdToNumberOfTokensInPot); } public Map<Integer, Integer> getPlayerIdToNumberOfTokensInPot() { return playerIdToNumberOfTokensInPot; } public List<Map<String, Object>> getPlayersInfo() { return playersInfo; } public Map<String, Object> getState() { return state; } public List<Integer> getPlayerIds() { List<Integer> playerIds = new ArrayList<>(); for (Map<String, Object> playerInfo : getPlayersInfo()) { playerIds.add((Integer) playerInfo.get(PLAYER_ID)); } return playerIds; } public int getPlayerIndex(int playerId) { return getPlayerIds().indexOf(playerId); } public Map<String, Object> getPlayerInfo(int playerId) { for (Map<String, Object> playerInfo : getPlayersInfo()) { if (playerId == (Integer) playerInfo.get(PLAYER_ID)) { return playerInfo; } } return null; } public String getPlayerName(int playerId) { return String.valueOf(getPlayerInfo(playerId).get(PLAYER_NAME)); } public int getPlayerTokens(int playerId) { return (Integer) (getPlayerInfo(playerId).get(PLAYER_TOKENS)); } public String getPlayerProfilePicUrl(int playerId) { return 
String.valueOf(getPlayerInfo(playerId).get(PLAYER_PROFILE_PIC_URL)); } public Map<String, Object> getLastState() { return lastState; } public List<Operation> getLastMove() { return lastMove; } public int getLastMovePlayerId() { return lastMovePlayerId; } } public static class UpdateUI extends VerifyMove { /** * yourPlayerId can either be the ID of a player in playersInfo, * or 0 for the Artificial Intelligence (AI) player, * or -1 to represent that you're VIEWING a match (i.e., you're not one of the players and * therefore you cannot make moves). */ protected final int yourPlayerId; public UpdateUI(int yourPlayerId, List<Map<String, Object>> playersInfo, Map<String, Object> state, Map<String, Object> lastState, List<Operation> lastMove, int lastMovePlayerId, Map<Integer, Integer> playerIdToNumberOfTokensInPot) { super(playersInfo, state, lastState, lastMove, lastMovePlayerId, playerIdToNumberOfTokensInPot); this.yourPlayerId = yourPlayerId; } @Override public String getMessageName() { return "UpdateUI"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList( "yourPlayerId", yourPlayerId, "playersInfo", playersInfo, "state", state, "lastState", lastState, "lastMove", lastMove, "lastMovePlayerId", lastMovePlayerId, "playerIdToNumberOfTokensInPot", playerIdToNumberOfTokensInPot); } public int getYourPlayerId() { return yourPlayerId; } public boolean isAiPlayer() { return yourPlayerId == AI_PLAYER_ID; } public boolean isViewer() { return yourPlayerId == VIEWER_ID; } public int getYourPlayerIndex() { return getPlayerIds().indexOf(yourPlayerId); } } public abstract static class Operation extends Message { } public static class EndGame extends Operation { private final Map<Integer, Integer> playerIdToScore; public EndGame(Map<Integer, Integer> playerIdToScore) { this.playerIdToScore = ImmutableMap.copyOf(playerIdToScore); } public EndGame(int winnerPlayerId) { Map<Integer, Integer> strPlayerIdToScore = new HashMap<>(); 
strPlayerIdToScore.put(winnerPlayerId, 1); this.playerIdToScore = ImmutableMap.copyOf(strPlayerIdToScore); } @Override public String getMessageName() { return "EndGame"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("playerIdToScore", playerIdToScore); } public Map<Integer, Integer> getPlayerIdToScore() { return playerIdToScore; } } public static class Set extends Operation { private final String key; private final Object value; private final Object visibleToPlayerIds; public Set(String key, Object value) { this(key, value, ALL); } public Set(String key, Object value, List<Integer> visibleToPlayerIds) { this(key, value, (Object) visibleToPlayerIds); } private Set(String key, Object value, Object visibleToPlayerIds) { this.key = key; this.value = checkHasJsonSupportedType(value); this.visibleToPlayerIds = checkHasJsonSupportedType(visibleToPlayerIds); } @Override public String getMessageName() { return "Set"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList( "key", key, "value", value, "visibleToPlayerIds", visibleToPlayerIds); } public String getKey() { return key; } public Object getValue() { return value; } public Object getVisibleToPlayerIds() { return visibleToPlayerIds; } } /** * An operation to set a random integer in the range [from,to), * so from {@code from} (inclusive) until {@code to} (exclusive). 
*/ public static class SetRandomInteger extends Operation { private final String key; private final int from; private final int to; public SetRandomInteger(String key, int from, int to) { this.key = key; this.from = from; this.to = to; } @Override public String getMessageName() { return "SetRandomInteger"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("key", key, "from", from, "to", to); } public String getKey() { return key; } public int getFrom() { return from; } public int getTo() { return to; } } public static class SetVisibility extends Operation { private final String key; private final Object visibleToPlayerIds; public SetVisibility(String key) { this(key, ALL); } public SetVisibility(String key, List<Integer> visibleToPlayerIds) { this(key, (Object) visibleToPlayerIds); } private SetVisibility(String key, Object visibleToPlayerIds) { this.key = key; this.visibleToPlayerIds = checkHasJsonSupportedType(visibleToPlayerIds); } @Override public String getMessageName() { return "SetVisibility"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("key", key, "visibleToPlayerIds", visibleToPlayerIds); } public String getKey() { return key; } public Object getVisibleToPlayerIds() { return visibleToPlayerIds; } } public static class SetTurn extends Operation { private final int playerId; /** The number of seconds playerId will have to send MakeMove; * if it is 0 then the container will decide on the time limit * (or the container may decide that there is no time limit). 
*/ private final int numberOfSecondsForTurn; public SetTurn(int playerId) { this(playerId, 0); } public SetTurn(int playerId, int numberOfSecondsForTurn) { this.playerId = playerId; this.numberOfSecondsForTurn = numberOfSecondsForTurn; } @Override public String getMessageName() { return "SetTurn"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("playerId", playerId, "numberOfSecondsForTurn", numberOfSecondsForTurn); } public int getPlayerId() { return playerId; } public int getNumberOfSecondsForTurn() { return numberOfSecondsForTurn; } } public static class Delete extends Operation { private final String key; public Delete(String key) { this.key = key; } @Override public String getMessageName() { return "Delete"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("key", key); } public String getKey() { return key; } } public static class AttemptChangeTokens extends Operation { /** * Map each playerId to the number of tokens that should be increased/decreased. * The server will verify that the total change in tokens (in playerIdToTokenChange) * is equal to minus the total change in the pot (in playerIdToNumberOfTokensInPot). * * For example, suppose the total pot is initially empty, i.e., * playerIdToNumberOfTokensInPot={} (see {@link VerifyMove}) * Then you do the operation: * AttemptChangeTokens({42: -3000, 43: -2000}, {42: 3000, 43: 2000}) * If playerId=42 indeed has at least 3000 tokens and playerId=43 has at least 2000 tokens * then the operation will succeed and the total pot will have 5000 tokens and you will have * in {@link VerifyMove}: * playerIdToNumberOfTokensInPot={42: 3000, 43: 2000} * If one of the players does not have sufficient token then the operation will fail, and * playerIdToNumberOfTokensInPot={} * * Assume the operation succeeded. 
As the game continues, playerId=43 might risk more money: * AttemptChangeTokens({43: -3000}, {42: 3000, 43: 5000}) * and if he has enough tokens then the total pot will increase to 8000: * playerIdToNumberOfTokensInPot={42: 3000, 43: 5000} * When the game ends you should distribute the pot, e.g., if the game ends in a tie you could * call: * AttemptChangeTokens({42: 4000, 43: 4000}, {42: 0, 43:0}) * and then the total pot will be 0. * If the game ends when the total pot is non-zero, * the pot is given to the player with the highest score (see {@link EndGame}). */ private final Map<Integer, Integer> playerIdToTokenChange; /** * The number of tokens each player currently has in the pot; * The sum of values is always non-negative (i.e., the total pot can NOT be negative). * When the game ends, the pot is given to the player with the highest score. */ protected final Map<Integer, Integer> playerIdToNumberOfTokensInPot; public AttemptChangeTokens(Map<Integer, Integer> playerIdToTokenChange, Map<Integer, Integer> playerIdToNumberOfTokensInPot) { this.playerIdToTokenChange = ImmutableMap.copyOf(playerIdToTokenChange); this.playerIdToNumberOfTokensInPot = ImmutableMap.copyOf(playerIdToNumberOfTokensInPot); } @Override public String getMessageName() { return "AttemptChangeTokens"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("playerIdToTokenChange", playerIdToTokenChange, "playerIdToNumberOfTokensInPot", playerIdToNumberOfTokensInPot); } public Map<Integer, Integer> getPlayerIdToTokenChange() { return playerIdToTokenChange; } public Map<Integer, Integer> getPlayerIdToNumberOfTokensInPot() { return playerIdToNumberOfTokensInPot; } } public static class Shuffle extends Operation { private final List<String> keys; public Shuffle(List<String> keys) { this.keys = checkHasJsonSupportedType(keys); } @Override public String getMessageName() { return "Shuffle"; } @Override public List<Object> getFieldsNameAndValue() { return 
Arrays.<Object>asList("keys", keys); } public List<String> getKeys() { return keys; } } public static class GameReady extends Message { @Override public String getMessageName() { return "GameReady"; } } public static class MakeMove extends Message { private final List<Operation> operations; public MakeMove(List<Operation> operations) { this.operations = operations; } @Override public String getMessageName() { return "MakeMove"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("operations", operations); } public List<Operation> getOperations() { return operations; } } public static class VerifyMoveDone extends Message { private final int hackerPlayerId; private final String message; /** Move is verified, i.e., no hacker found. */ public VerifyMoveDone() { this(0, null); } /** Hacker found! */ public VerifyMoveDone(int hackerPlayerId, String message) { this.hackerPlayerId = hackerPlayerId; this.message = message; } @Override public String getMessageName() { return "VerifyMoveDone"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("hackerPlayerId", hackerPlayerId, "message", message); } public int getHackerPlayerId() { return hackerPlayerId; } public String getMessage() { return message; } } public static class RequestManipulator extends Message { @Override public String getMessageName() { return "RequestManipulator"; } } public static class ManipulateState extends Message { private final Map<String, Object> state; public ManipulateState(Map<String, Object> state) { this.state = checkHasJsonSupportedType(state); } @Override public String getMessageName() { return "ManipulateState"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.asList("state", state); } public Map<String, Object> getOperations() { return state; } } public static class ManipulationDone extends Message { private final List<Operation> operations; public ManipulationDone(List<Operation> operations) { 
this.operations = operations; } @Override public String getMessageName() { return "ManipulationDone"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.asList("operations", operations); } public List<Operation> getOperations() { return operations; } } public abstract static class Message { public abstract String getMessageName(); public List<Object> getFieldsNameAndValue() { return Arrays.asList(); } @Override public int hashCode() { return getFieldsNameAndValue().hashCode() ^ getMessageName().hashCode(); } @Override public boolean equals(Object obj) { if (!(obj instanceof Message)) { return false; } Message other = (Message) obj; return Objects.equals(other.getFieldsNameAndValue(), getFieldsNameAndValue()) && Objects.equals(other.getMessageName(), getMessageName()); } @Override public String toString() { return toMessage().toString(); } private List<?> listToMessage(List<?> values) { if (values.isEmpty() || !(values.get(0) instanceof Message)) { return values; } List<Object> messages = new ArrayList<>(); for (Object operation : values) { messages.add(((Message) operation).toMessage()); } return messages; } public Map<String, Object> toMessage() { Map<String, Object> message = new HashMap<>(); message.put("type", getMessageName()); List<Object> fieldsNameAndValue = getFieldsNameAndValue(); for (int i = 0; i < fieldsNameAndValue.size() / 2; i++) { String fieldName = (String) fieldsNameAndValue.get(2 * i); Object fieldValue = fieldsNameAndValue.get(2 * i + 1); // If the field value is a list of operations (lastMove/operations), // then we need to convert each operation to a message if (fieldValue instanceof List) { fieldValue = listToMessage((List<?>) fieldValue); } message.put(fieldName, fieldValue); } return message; } @SuppressWarnings("unchecked") private static List<Operation> messageToOperationList(Object operationMessagesObj) { List<?> operationMessages = (List<?>) operationMessagesObj; List<Operation> operations = new ArrayList<>(); for 
(Object operationMessage : operationMessages) { operations.add((Operation) messageToHasEquality((Map<String, Object>) operationMessage)); } return operations; } @SuppressWarnings("unchecked") public static Message messageToHasEquality(Map<String, Object> message) { String type = (String) message.get("type"); switch (type) { case "UpdateUI": return new UpdateUI( (Integer) message.get("yourPlayerId"), (List<Map<String, Object>>) message.get("playersInfo"), (Map<String, Object>) message.get("state"), (Map<String, Object>) message.get("lastState"), messageToOperationList(message.get("lastMove")), (Integer) message.get("lastMovePlayerId"), toIntegerMap(message.get("playerIdToNumberOfTokensInPot"))); case "VerifyMove": return new VerifyMove( (List<Map<String, Object>>) message.get("playersInfo"), (Map<String, Object>) message.get("state"), (Map<String, Object>) message.get("lastState"), messageToOperationList(message.get("lastMove")), (Integer) message.get("lastMovePlayerId"), toIntegerMap(message.get("playerIdToNumberOfTokensInPot"))); case "EndGame": return new EndGame(toIntegerMap(message.get("playerIdToScore"))); case "Set": return new Set((String) message.get("key"), message.get("value"), message.get("visibleToPlayerIds")); case "SetRandomInteger": return new SetRandomInteger( (String) message.get("key"), (Integer) message.get("from"), (Integer) message.get("to")); case "SetVisibility": return new SetVisibility( (String) message.get("key"), message.get("visibleToPlayerIds")); case "SetTurn": return new SetTurn((Integer) message.get("playerId"), (Integer) message.get("numberOfSecondsForTurn")); case "Delete": return new Delete((String) message.get("key")); case "AttemptChangeTokens": return new AttemptChangeTokens(toIntegerMap(message.get("playerIdToTokenChange")), toIntegerMap(message.get("playerIdToNumberOfTokensInPot"))); case "Shuffle": return new Shuffle((List<String>) message.get("keys")); case "GameReady": return new GameReady(); case "MakeMove": return new 
MakeMove(messageToOperationList(message.get("operations"))); case "VerifyMoveDone": return new VerifyMoveDone( (Integer) message.get("hackerPlayerId"), (String) message.get("message")); case "RequestManipulator": return new RequestManipulator(); case "ManipulateState": return new ManipulateState((Map<String, Object>) message.get("state")); case "ManipulationDone": return new ManipulationDone(messageToOperationList(message.get("operations"))); default: return null; } } } static Map<Integer, Integer> toIntegerMap(Object objMap) { Map<?, ?> map = (Map<?, ?>) objMap; Map<Integer, Integer> result = new HashMap<>(); for (Object key : map.keySet()) { Object value = map.get(key); result.put(key instanceof Integer ? (Integer) key : Integer.parseInt(key.toString()), value instanceof Integer ? (Integer) value : Integer.parseInt(value.toString())); } return result; } /** * Checks the object has a JSON-supported data type, i.e., * the object is either a primitive (String, Integer, Double, Boolean, null) * or the object is a List and every element in the list has a JSON-supported data type, * or the object is a Map and the keys are String and the values have JSON-supported data types. * @return the given object. */ static <T> T checkHasJsonSupportedType(T object) { if (object == null) { return object; } if (object instanceof Integer || object instanceof Double || object instanceof String || object instanceof Boolean) { return object; } if (object instanceof List) { List<?> list = (List<?>) object; for (Object element : list) { checkHasJsonSupportedType(element); } return object; } if (object instanceof Map) { Map<?, ?> map = (Map<?, ?>) object; for (Object key : map.keySet()) { if (!(key instanceof String)) { throw new IllegalArgumentException("Keys in a map must be String! key=" + key); } } for (Object value : map.values()) { checkHasJsonSupportedType(value); } return object; } throw new IllegalArgumentException( "The object doesn't have a JSON-supported data type! 
object=" + object); } private GameApi() { } }
src/org/risk/client/GameApi.java
package org.risk.client; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import com.google.common.collect.ImmutableMap; public final class GameApi { public static final String ALL = "ALL"; public static final String PLAYER_ID = "playerId"; public static final String PLAYER_NAME = "playerName"; public static final String PLAYER_TOKENS = "playerTokens"; public static final String PLAYER_PROFILE_PIC_URL = "playerProfilePicUrl"; /** playerId for the Artificial Intelligence (AI) player. */ public static final int AI_PLAYER_ID = 0; /** playerId for a user viewing a match; a viewer can't make any moves in the game. */ public static final int VIEWER_ID = -1; public static class VerifyMove extends HasEquality { protected final List<Map<String, Object>> playersInfo; protected final Map<String, Object> state; protected final Map<String, Object> lastState; /** * You should verify this lastMove is legal given lastState; some imperfect information * games will need to also examine state to determine if the lastMove was legal. */ protected final List<Operation> lastMove; /** * lastMovePlayerId can either be the ID of a player in playersInfo, * or 0 for the Artificial Intelligence (AI) player. */ protected final int lastMovePlayerId; /** * The number of tokens each player currently has in the pot (see {@link AttemptChangeTokens}); * The sum of values is always non-negative (i.e., the total pot can NOT be negative). * If the game ends when the total pot is non-zero, * the pot is given to the player with the highest score (see {@link EndGame}). 
*/ protected final Map<Integer, Integer> playerIdToNumberOfTokensInPot; public VerifyMove(List<Map<String, Object>> playersInfo, Map<String, Object> state, Map<String, Object> lastState, List<Operation> lastMove, int lastMovePlayerId, Map<Integer, Integer> playerIdToNumberOfTokensInPot) { this.playersInfo = checkHasJsonSupportedType(playersInfo); this.state = checkHasJsonSupportedType(state); this.lastState = checkHasJsonSupportedType(lastState); this.lastMove = lastMove; this.lastMovePlayerId = checkHasJsonSupportedType(lastMovePlayerId); this.playerIdToNumberOfTokensInPot = playerIdToNumberOfTokensInPot; } @Override public String getClassName() { return "VerifyMove"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList( "playersInfo", playersInfo, "state", state, "lastState", lastState, "lastMove", lastMove, "lastMovePlayerId", lastMovePlayerId, "playerIdToNumberOfTokensInPot", playerIdToNumberOfTokensInPot); } public Map<Integer, Integer> getPlayerIdToNumberOfTokensInPot() { return playerIdToNumberOfTokensInPot; } public List<Map<String, Object>> getPlayersInfo() { return playersInfo; } public Map<String, Object> getState() { return state; } public List<Integer> getPlayerIds() { List<Integer> playerIds = new ArrayList<>(); for (Map<String, Object> playerInfo : getPlayersInfo()) { playerIds.add((Integer) playerInfo.get(PLAYER_ID)); } return playerIds; } public int getPlayerIndex(int playerId) { return getPlayerIds().indexOf(playerId); } public Map<String, Object> getPlayerInfo(int playerId) { for (Map<String, Object> playerInfo : getPlayersInfo()) { if (playerId == (Integer) playerInfo.get(PLAYER_ID)) { return playerInfo; } } return null; } public String getPlayerName(int playerId) { return String.valueOf(getPlayerInfo(playerId).get(PLAYER_NAME)); } public int getPlayerTokens(int playerId) { return (Integer) (getPlayerInfo(playerId).get(PLAYER_TOKENS)); } public String getPlayerProfilePicUrl(int playerId) { return 
String.valueOf(getPlayerInfo(playerId).get(PLAYER_PROFILE_PIC_URL)); } public Map<String, Object> getLastState() { return lastState; } public List<Operation> getLastMove() { return lastMove; } public int getLastMovePlayerId() { return lastMovePlayerId; } } public static class UpdateUI extends VerifyMove { /** * yourPlayerId can either be the ID of a player in playersInfo, * or 0 for the Artificial Intelligence (AI) player, * or -1 to represent that you're VIEWING a match (i.e., you're not one of the players and * therefore you cannot make moves). */ protected final int yourPlayerId; public UpdateUI(int yourPlayerId, List<Map<String, Object>> playersInfo, Map<String, Object> state, Map<String, Object> lastState, List<Operation> lastMove, int lastMovePlayerId, Map<Integer, Integer> playerIdToNumberOfTokensInPot) { super(playersInfo, state, lastState, lastMove, lastMovePlayerId, playerIdToNumberOfTokensInPot); this.yourPlayerId = yourPlayerId; } @Override public String getClassName() { return "UpdateUI"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList( "yourPlayerId", yourPlayerId, "playersInfo", playersInfo, "state", state, "lastState", lastState, "lastMove", lastMove, "lastMovePlayerId", lastMovePlayerId, "playerIdToNumberOfTokensInPot", playerIdToNumberOfTokensInPot); } public int getYourPlayerId() { return yourPlayerId; } public boolean isAiPlayer() { return yourPlayerId == AI_PLAYER_ID; } public boolean isViewer() { return yourPlayerId == VIEWER_ID; } public int getYourPlayerIndex() { return getPlayerIds().indexOf(yourPlayerId); } } public abstract static class Operation extends HasEquality { } public static class EndGame extends Operation { private final Map<Integer, Integer> playerIdToScore; public EndGame(Map<Integer, Integer> playerIdToScore) { this.playerIdToScore = ImmutableMap.copyOf(playerIdToScore); } public EndGame(int winnerPlayerId) { Map<Integer, Integer> strPlayerIdToScore = new HashMap<>(); 
strPlayerIdToScore.put(winnerPlayerId, 1); this.playerIdToScore = ImmutableMap.copyOf(strPlayerIdToScore); } @Override public String getClassName() { return "EndGame"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("playerIdToScore", playerIdToScore); } public Map<Integer, Integer> getPlayerIdToScore() { return playerIdToScore; } } public static class Set extends Operation { private final String key; private final Object value; private final Object visibleToPlayerIds; public Set(String key, Object value) { this(key, value, ALL); } public Set(String key, Object value, List<Integer> visibleToPlayerIds) { this(key, value, (Object) visibleToPlayerIds); } private Set(String key, Object value, Object visibleToPlayerIds) { this.key = key; this.value = checkHasJsonSupportedType(value); this.visibleToPlayerIds = checkHasJsonSupportedType(visibleToPlayerIds); } @Override public String getClassName() { return "Set"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList( "key", key, "value", value, "visibleToPlayerIds", visibleToPlayerIds); } public String getKey() { return key; } public Object getValue() { return value; } public Object getVisibleToPlayerIds() { return visibleToPlayerIds; } } /** * An operation to set a random integer in the range [from,to), * so from {@code from} (inclusive) until {@code to} (exclusive). 
*/ public static class SetRandomInteger extends Operation { private final String key; private final int from; private final int to; public SetRandomInteger(String key, int from, int to) { this.key = key; this.from = from; this.to = to; } @Override public String getClassName() { return "SetRandomInteger"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("key", key, "from", from, "to", to); } public String getKey() { return key; } public int getFrom() { return from; } public int getTo() { return to; } } public static class SetVisibility extends Operation { private final String key; private final Object visibleToPlayerIds; public SetVisibility(String key) { this(key, ALL); } public SetVisibility(String key, List<Integer> visibleToPlayerIds) { this(key, (Object) visibleToPlayerIds); } private SetVisibility(String key, Object visibleToPlayerIds) { this.key = key; this.visibleToPlayerIds = checkHasJsonSupportedType(visibleToPlayerIds); } @Override public String getClassName() { return "SetVisibility"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("key", key, "visibleToPlayerIds", visibleToPlayerIds); } public String getKey() { return key; } public Object getVisibleToPlayerIds() { return visibleToPlayerIds; } } public static class SetTurn extends Operation { private final int playerId; /** The number of seconds playerId will have to send MakeMove; * if it is 0 then the container will decide on the time limit * (or the container may decide that there is no time limit). 
*/ private final int numberOfSecondsForTurn; public SetTurn(int playerId) { this(playerId, 0); } public SetTurn(int playerId, int numberOfSecondsForTurn) { this.playerId = playerId; this.numberOfSecondsForTurn = numberOfSecondsForTurn; } @Override public String getClassName() { return "SetTurn"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("playerId", playerId, "numberOfSecondsForTurn", numberOfSecondsForTurn); } public int getPlayerId() { return playerId; } public int getNumberOfSecondsForTurn() { return numberOfSecondsForTurn; } } public static class Delete extends Operation { private final String key; public Delete(String key) { this.key = key; } @Override public String getClassName() { return "Delete"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("key", key); } public String getKey() { return key; } } public static class AttemptChangeTokens extends Operation { /** * Map each playerId to the number of tokens that should be increased/decreased. * The server will verify that the total change in tokens (in playerIdToTokenChange) * is equal to minus the total change in the pot (in playerIdToNumberOfTokensInPot). * * For example, suppose the total pot is initially empty, i.e., * playerIdToNumberOfTokensInPot={} (see {@link VerifyMove}) * Then you do the operation: * AttemptChangeTokens({42: -3000, 43: -2000}, {42: 3000, 43: 2000}) * If playerId=42 indeed has at least 3000 tokens and playerId=43 has at least 2000 tokens * then the operation will succeed and the total pot will have 5000 tokens and you will have * in {@link VerifyMove}: * playerIdToNumberOfTokensInPot={42: 3000, 43: 2000} * If one of the players does not have sufficient token then the operation will fail, and * playerIdToNumberOfTokensInPot={} * * Assume the operation succeeded. 
As the game continues, playerId=43 might risk more money: * AttemptChangeTokens({43: -3000}, {42: 3000, 43: 5000}) * and if he has enough tokens then the total pot will increase to 8000: * playerIdToNumberOfTokensInPot={42: 3000, 43: 5000} * When the game ends you should distribute the pot, e.g., if the game ends in a tie you could * call: * AttemptChangeTokens({42: 4000, 43: 4000}, {42: 0, 43:0}) * and then the total pot will be 0. * If the game ends when the total pot is non-zero, * the pot is given to the player with the highest score (see {@link EndGame}). */ private final Map<Integer, Integer> playerIdToTokenChange; /** * The number of tokens each player currently has in the pot; * The sum of values is always non-negative (i.e., the total pot can NOT be negative). * When the game ends, the pot is given to the player with the highest score. */ protected final Map<Integer, Integer> playerIdToNumberOfTokensInPot; public AttemptChangeTokens(Map<Integer, Integer> playerIdToTokenChange, Map<Integer, Integer> playerIdToNumberOfTokensInPot) { this.playerIdToTokenChange = ImmutableMap.copyOf(playerIdToTokenChange); this.playerIdToNumberOfTokensInPot = ImmutableMap.copyOf(playerIdToNumberOfTokensInPot); } @Override public String getClassName() { return "AttemptChangeTokens"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("playerIdToTokenChange", playerIdToTokenChange, "playerIdToNumberOfTokensInPot", playerIdToNumberOfTokensInPot); } public Map<Integer, Integer> getPlayerIdToTokenChange() { return playerIdToTokenChange; } public Map<Integer, Integer> getPlayerIdToNumberOfTokensInPot() { return playerIdToNumberOfTokensInPot; } } public static class Shuffle extends Operation { private final List<String> keys; public Shuffle(List<String> keys) { this.keys = checkHasJsonSupportedType(keys); } @Override public String getClassName() { return "Shuffle"; } @Override public List<Object> getFieldsNameAndValue() { return 
Arrays.<Object>asList("keys", keys); } public List<String> getKeys() { return keys; } } public static class GameReady extends HasEquality { @Override public String getClassName() { return "GameReady"; } } public static class MakeMove extends HasEquality { private final List<Operation> operations; public MakeMove(List<Operation> operations) { this.operations = operations; } @Override public String getClassName() { return "MakeMove"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("operations", operations); } public List<Operation> getOperations() { return operations; } } public static class VerifyMoveDone extends HasEquality { private final int hackerPlayerId; private final String message; /** Move is verified, i.e., no hacker found. */ public VerifyMoveDone() { this(0, null); } /** Hacker found! */ public VerifyMoveDone(int hackerPlayerId, String message) { this.hackerPlayerId = hackerPlayerId; this.message = message; } @Override public String getClassName() { return "VerifyMoveDone"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.<Object>asList("hackerPlayerId", hackerPlayerId, "message", message); } public int getHackerPlayerId() { return hackerPlayerId; } public String getMessage() { return message; } } public static class RequestManipulator extends HasEquality { @Override public String getClassName() { return "RequestManipulator"; } } public static class ManipulateState extends HasEquality { private final Map<String, Object> state; public ManipulateState(Map<String, Object> state) { this.state = checkHasJsonSupportedType(state); } @Override public String getClassName() { return "ManipulateState"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.asList("state", state); } public Map<String, Object> getOperations() { return state; } } public static class ManipulationDone extends HasEquality { private final List<Operation> operations; public ManipulationDone(List<Operation> 
operations) { this.operations = operations; } @Override public String getClassName() { return "ManipulationDone"; } @Override public List<Object> getFieldsNameAndValue() { return Arrays.asList("operations", operations); } public List<Operation> getOperations() { return operations; } } public abstract static class HasEquality { public abstract String getClassName(); public List<Object> getFieldsNameAndValue() { return Arrays.asList(); } @Override public int hashCode() { return getFieldsNameAndValue().hashCode() ^ getClassName().hashCode(); } @Override public boolean equals(Object obj) { if (!(obj instanceof HasEquality)) { return false; } HasEquality other = (HasEquality) obj; return Objects.equals(other.getFieldsNameAndValue(), getFieldsNameAndValue()) && Objects.equals(other.getClassName(), getClassName()); } @Override public String toString() { return toMessage().toString(); } private List<?> listToMessage(List<?> values) { if (values.isEmpty() || !(values.get(0) instanceof HasEquality)) { return values; } List<Object> messages = new ArrayList<>(); for (Object operation : values) { messages.add(((HasEquality) operation).toMessage()); } return messages; } public Map<String, Object> toMessage() { Map<String, Object> message = new HashMap<>(); message.put("type", getClassName()); List<Object> fieldsNameAndValue = getFieldsNameAndValue(); for (int i = 0; i < fieldsNameAndValue.size() / 2; i++) { String fieldName = (String) fieldsNameAndValue.get(2 * i); Object fieldValue = fieldsNameAndValue.get(2 * i + 1); // If the field value is a list of operations (lastMove/operations), // then we need to convert each operation to a message if (fieldValue instanceof List) { fieldValue = listToMessage((List<?>) fieldValue); } message.put(fieldName, fieldValue); } return message; } @SuppressWarnings("unchecked") private static List<Operation> messageToOperationList(Object operationMessagesObj) { List<?> operationMessages = (List<?>) operationMessagesObj; List<Operation> operations 
= new ArrayList<>(); for (Object operationMessage : operationMessages) { operations.add((Operation) messageToHasEquality((Map<String, Object>) operationMessage)); } return operations; } @SuppressWarnings("unchecked") public static HasEquality messageToHasEquality(Map<String, Object> message) { String type = (String) message.get("type"); switch (type) { case "UpdateUI": return new UpdateUI( (Integer) message.get("yourPlayerId"), (List<Map<String, Object>>) message.get("playersInfo"), (Map<String, Object>) message.get("state"), (Map<String, Object>) message.get("lastState"), messageToOperationList(message.get("lastMove")), (Integer) message.get("lastMovePlayerId"), toIntegerMap(message.get("playerIdToNumberOfTokensInPot"))); case "VerifyMove": return new VerifyMove( (List<Map<String, Object>>) message.get("playersInfo"), (Map<String, Object>) message.get("state"), (Map<String, Object>) message.get("lastState"), messageToOperationList(message.get("lastMove")), (Integer) message.get("lastMovePlayerId"), toIntegerMap(message.get("playerIdToNumberOfTokensInPot"))); case "EndGame": return new EndGame(toIntegerMap(message.get("playerIdToScore"))); case "Set": return new Set((String) message.get("key"), message.get("value"), message.get("visibleToPlayerIds")); case "SetRandomInteger": return new SetRandomInteger( (String) message.get("key"), (Integer) message.get("from"), (Integer) message.get("to")); case "SetVisibility": return new SetVisibility( (String) message.get("key"), message.get("visibleToPlayerIds")); case "SetTurn": return new SetTurn((Integer) message.get("playerId"), (Integer) message.get("numberOfSecondsForTurn")); case "Delete": return new Delete((String) message.get("key")); case "AttemptChangeTokens": return new AttemptChangeTokens(toIntegerMap(message.get("playerIdToTokenChange")), toIntegerMap(message.get("playerIdToNumberOfTokensInPot"))); case "Shuffle": return new Shuffle((List<String>) message.get("keys")); case "GameReady": return new GameReady(); 
case "MakeMove": return new MakeMove(messageToOperationList(message.get("operations"))); case "VerifyMoveDone": return new VerifyMoveDone( (Integer) message.get("hackerPlayerId"), (String) message.get("message")); case "RequestManipulator": return new RequestManipulator(); case "ManipulateState": return new ManipulateState((Map<String, Object>) message.get("state")); case "ManipulationDone": return new ManipulationDone(messageToOperationList(message.get("operations"))); default: return null; } } } static Map<Integer, Integer> toIntegerMap(Object objMap) { Map<?, ?> map = (Map<?, ?>) objMap; Map<Integer, Integer> result = new HashMap<>(); for (Object key : map.keySet()) { Object value = map.get(key); result.put(key instanceof Integer ? (Integer) key : Integer.parseInt(key.toString()), value instanceof Integer ? (Integer) value : Integer.parseInt(value.toString())); } return result; } /** * Checks the object has a JSON-supported data type, i.e., * the object is either a primitive (String, Integer, Double, Boolean, null) * or the object is a List and every element in the list has a JSON-supported data type, * or the object is a Map and the keys are String and the values have JSON-supported data types. * @return the given object. */ static <T> T checkHasJsonSupportedType(T object) { if (object == null) { return object; } if (object instanceof Integer || object instanceof Double || object instanceof String || object instanceof Boolean) { return object; } if (object instanceof List) { List<?> list = (List<?>) object; for (Object element : list) { checkHasJsonSupportedType(element); } return object; } if (object instanceof Map) { Map<?, ?> map = (Map<?, ?>) object; for (Object key : map.keySet()) { if (!(key instanceof String)) { throw new IllegalArgumentException("Keys in a map must be String! 
key=" + key); } } for (Object value : map.values()) { checkHasJsonSupportedType(value); } return object; } throw new IllegalArgumentException( "The object doesn't have a JSON-supported data type! object=" + object); } private GameApi() { } }
Updated the GameAPI.
src/org/risk/client/GameApi.java
Updated the GameAPI.
Java
apache-2.0
64280c5acdbc0e63005ac4d410c4a774edbac32a
0
ligzy/JGroups,belaban/JGroups,vjuranek/JGroups,danberindei/JGroups,pferraro/JGroups,pruivo/JGroups,ibrahimshbat/JGroups,rhusar/JGroups,ibrahimshbat/JGroups,danberindei/JGroups,tristantarrant/JGroups,rpelisse/JGroups,rhusar/JGroups,pferraro/JGroups,ibrahimshbat/JGroups,tristantarrant/JGroups,Sanne/JGroups,belaban/JGroups,Sanne/JGroups,rvansa/JGroups,ibrahimshbat/JGroups,TarantulaTechnology/JGroups,slaskawi/JGroups,Sanne/JGroups,pferraro/JGroups,slaskawi/JGroups,rpelisse/JGroups,rhusar/JGroups,dimbleby/JGroups,deepnarsay/JGroups,dimbleby/JGroups,rvansa/JGroups,kedzie/JGroups,slaskawi/JGroups,TarantulaTechnology/JGroups,rpelisse/JGroups,kedzie/JGroups,kedzie/JGroups,deepnarsay/JGroups,ligzy/JGroups,danberindei/JGroups,vjuranek/JGroups,ligzy/JGroups,vjuranek/JGroups,dimbleby/JGroups,pruivo/JGroups,belaban/JGroups,pruivo/JGroups,deepnarsay/JGroups,TarantulaTechnology/JGroups
package org.jgroups; import org.jgroups.annotations.MBean; import org.jgroups.annotations.ManagedAttribute; import org.jgroups.annotations.ManagedOperation; import org.jgroups.conf.ConfiguratorFactory; import org.jgroups.conf.ProtocolStackConfigurator; import org.jgroups.logging.Log; import org.jgroups.logging.LogFactory; import org.jgroups.protocols.TP; import org.jgroups.stack.ProtocolStack; import org.jgroups.stack.StateTransferInfo; import org.jgroups.util.*; import org.w3c.dom.Element; import java.io.File; import java.io.InputStream; import java.io.OutputStream; import java.io.Serializable; import java.net.URL; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Vector; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Exchanger; /** * JChannel is a pure Java implementation of Channel. * When a JChannel object is instantiated it automatically sets up the * protocol stack. * <p> * <B>Properties</B> * <P> * Properties are used to configure a channel, and are accepted in * several forms; the String form is described here. * A property string consists of a number of properties separated by * colons. For example: * <p> * <pre>"&lt;prop1&gt;(arg1=val1):&lt;prop2&gt;(arg1=val1;arg2=val2):&lt;prop3&gt;:&lt;propn&gt;"</pre> * <p> * Each property relates directly to a protocol layer, which is * implemented as a Java class. When a protocol stack is to be created * based on the above property string, the first property becomes the * bottom-most layer, the second one will be placed on the first, etc.: * the stack is created from the bottom to the top, as the string is * parsed from left to right. Each property has to be the name of a * Java class that resides in the * {@link org.jgroups.protocols} package. 
* <p> * Note that only the base name has to be given, not the fully specified * class name (e.g., UDP instead of org.jgroups.protocols.UDP). * <p> * Each layer may have 0 or more arguments, which are specified as a * list of name/value pairs in parentheses directly after the property. * In the example above, the first protocol layer has 1 argument, * the second 2, the third none. When a layer is created, these * properties (if there are any) will be set in a layer by invoking * the layer's setProperties() method * <p> * As an example the property string below instructs JGroups to create * a JChannel with protocols UDP, PING, FD and GMS:<p> * <pre>"UDP(mcast_addr=228.10.9.8;mcast_port=5678):PING:FD:GMS"</pre> * <p> * The UDP protocol layer is at the bottom of the stack, and it * should use mcast address 228.10.9.8. and port 5678 rather than * the default IP multicast address and port. The only other argument * instructs FD to output debug information while executing. * Property UDP refers to a class {@link org.jgroups.protocols.UDP}, * which is subsequently loaded and an instance of which is created as protocol layer. * If any of these classes are not found, an exception will be thrown and * the construction of the stack will be aborted. 
* * @author Bela Ban * @version $Id: JChannel.java,v 1.223 2009/08/21 22:27:25 graywatson Exp $ */ @MBean(description="JGroups channel") public class JChannel extends Channel { /** The default protocol stack used by the default constructor */ public static final String DEFAULT_PROTOCOL_STACK="udp.xml"; protected String properties=null; /*the address of this JChannel instance*/ private UUID local_addr=null; private String name=null; /*the channel (also know as group) name*/ private String cluster_name=null; // group name /*the latest view of the group membership*/ private View my_view=null; /*the queue that is used to receive messages (events) from the protocol stack*/ private final Queue mq=new Queue(); /*the protocol stack, used to send and receive messages from the protocol stack*/ private ProtocolStack prot_stack=null; private final Promise<Boolean> state_promise=new Promise<Boolean>(); private final Exchanger<StateTransferInfo> applstate_exchanger=new Exchanger<StateTransferInfo>(); private final Promise<Boolean> flush_unblock_promise=new Promise<Boolean>(); /*if FLUSH is used channel waits for UNBLOCK event, this is the default timeout, 5 secs*/ private static final long FLUSH_UNBLOCK_TIMEOUT=5000; /*flag to indicate whether to receive blocks, if this is set to true, receive_views is set to true*/ @ManagedAttribute(description="Flag indicating whether to receive blocks",writable=true) private boolean receive_blocks=false; /*flag to indicate whether to receive local messages *if this is set to false, the JChannel will not receive messages sent by itself*/ @ManagedAttribute(description="Flag indicating whether to receive this channel's own messages",writable=true) private boolean receive_local_msgs=true; /*channel connected flag*/ protected boolean connected=false; /*channel closed flag*/ protected boolean closed=false; // close() has been called, channel is unusable /** True if a state transfer protocol is available, false otherwise */ private boolean 
state_transfer_supported=false; // set by CONFIG event from STATE_TRANSFER protocol /** True if a flush protocol is available, false otherwise */ private volatile boolean flush_supported=false; // set by CONFIG event from FLUSH protocol /** Provides storage for arbitrary objects. Protocols can send up CONFIG events, and all key-value pairs of * a CONFIG event will be added to additional_data. On reconnect, a CONFIG event will be sent down by the channel, * containing all key-value pairs of additional_data */ protected final Map<String,Object> additional_data=new HashMap<String,Object>(); protected final ConcurrentMap<String,Object> config=new ConcurrentHashMap<String,Object>(); protected final Log log=LogFactory.getLog(getClass()); /** Collect statistics */ @ManagedAttribute(description="Collect channel statistics",writable=true) protected boolean stats=true; protected long sent_msgs=0, received_msgs=0, sent_bytes=0, received_bytes=0; private final TP.ProbeHandler probe_handler=new MyProbeHandler(); /** * Used by subclass to create a JChannel without a protocol stack, don't use as application programmer * @deprecated Remove in 3.0 */ protected JChannel(boolean no_op) { ; } /** * Constructs a <code>JChannel</code> instance with the protocol stack * specified by the <code>DEFAULT_PROTOCOL_STACK</code> member. * * @throws ChannelException if problems occur during the initialization of * the protocol stack. */ public JChannel() throws ChannelException { this(DEFAULT_PROTOCOL_STACK); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration contained by the specified file. * * @param properties a file containing a JGroups XML protocol stack * configuration. * * @throws ChannelException if problems occur during the configuration or * initialization of the protocol stack. 
*/ public JChannel(File properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration contained by the specified XML element. * * @param properties a XML element containing a JGroups XML protocol stack * configuration. * * @throws ChannelException if problems occur during the configuration or * initialization of the protocol stack. */ public JChannel(Element properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration indicated by the specified URL. * * @param properties a URL pointing to a JGroups XML protocol stack * configuration. * * @throws ChannelException if problems occur during the configuration or * initialization of the protocol stack. */ public JChannel(URL properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration based upon the specified properties parameter. * * @param properties an old style property string, a string representing a * system resource containing a JGroups XML configuration, * a string representing a URL pointing to a JGroups XML * XML configuration, or a string representing a file name * that contains a JGroups XML configuration. * * @throws ChannelException if problems occur during the configuration and * initialization of the protocol stack. */ public JChannel(String properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration contained by the protocol stack configurator parameter. * <p> * All of the public constructors of this class eventually delegate to this * method. 
* * @param configurator a protocol stack configurator containing a JGroups * protocol stack configuration. * * @throws ChannelException if problems occur during the initialization of * the protocol stack. */ public JChannel(ProtocolStackConfigurator configurator) throws ChannelException { init(configurator); } /** * Creates a new JChannel with the protocol stack as defined in the properties * parameter. an example of this parameter is<BR> * "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:FLUSH:GMS:VIEW_ENFORCER:STATE_TRANSFER:QUEUE"<BR> * Other examples can be found in the ./conf directory<BR> * @param properties the protocol stack setup; if null, the default protocol stack will be used. * The properties can also be a java.net.URL object or a string that is a URL spec. * The JChannel will validate any URL object and String object to see if they are a URL. * In case of the parameter being a url, the JChannel will try to load the xml from there. * In case properties is a org.w3c.dom.Element, the ConfiguratorFactory will parse the * DOM tree with the element as its root element. * @deprecated Use the constructors with specific parameter types instead. */ public JChannel(Object properties) throws ChannelException { if (properties == null) properties = DEFAULT_PROTOCOL_STACK; ProtocolStackConfigurator c=null; try { c=ConfiguratorFactory.getStackConfigurator(properties); } catch(Exception x) { throw new ChannelException("unable to load protocol stack", x); } init(c); } /** * Creates a channel with the same configuration as the channel passed to this constructor. This is used by * testing code, and should not be used by any other code ! * @param ch * @throws ChannelException */ public JChannel(JChannel ch) throws ChannelException { init(ch); receive_blocks=ch.receive_blocks; receive_local_msgs=ch.receive_local_msgs; receive_blocks=ch.receive_blocks; } /** * Returns the protocol stack. * Currently used by Debugger. 
* Specific to JChannel, therefore * not visible in Channel */ public ProtocolStack getProtocolStack() { return prot_stack; } protected Log getLog() { return log; } /** * Returns the protocol stack configuration in string format. An example of this property is<BR> * "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:FLUSH:GMS:VIEW_ENFORCER:STATE_TRANSFER:QUEUE" */ public String getProperties() { String retval=prot_stack != null? prot_stack.printProtocolSpec(true) : null; if(retval != null) properties=retval; return properties; } public boolean statsEnabled() { return stats; } public void enableStats(boolean stats) { this.stats=stats; } @ManagedOperation public void resetStats() { sent_msgs=received_msgs=sent_bytes=received_bytes=0; } @ManagedAttribute public long getSentMessages() {return sent_msgs;} @ManagedAttribute public long getSentBytes() {return sent_bytes;} @ManagedAttribute public long getReceivedMessages() {return received_msgs;} @ManagedAttribute public long getReceivedBytes() {return received_bytes;} @ManagedAttribute public int getNumberOfTasksInTimer() { TimeScheduler timer=getTimer(); return timer != null? timer.size() : -1; } @ManagedAttribute public int getTimerThreads() { TimeScheduler timer=getTimer(); return timer != null? timer.getCorePoolSize() : -1; } public String dumpTimerQueue() { TimeScheduler timer=getTimer(); return timer != null? timer.dumpTaskQueue() : "<n/a"; } /** * Returns a pretty-printed form of all the protocols. If include_properties * is set, the properties for each protocol will also be printed. */ @ManagedOperation public String printProtocolSpec(boolean include_properties) { ProtocolStack ps=getProtocolStack(); return ps != null? ps.printProtocolSpec(include_properties) : null; } /** * Connects the channel to a group. * If the channel is already connected, an error message will be printed to the error log. * If the channel is closed a ChannelClosed exception will be thrown. 
* This method starts the protocol stack by calling ProtocolStack.start, * then it sends an Event.CONNECT event down the stack and waits for the return value. * Once the call returns, the channel listeners are notified and the channel is considered connected. * * @param cluster_name A <code>String</code> denoting the group name. Cannot be null. * @exception ChannelException The protocol stack cannot be started * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer. * A new channel has to be created first. */ @ManagedOperation(description="Connects the channel to a group") public synchronized void connect(String cluster_name) throws ChannelException { connect(cluster_name,true); } /** * Connects the channel to a group. * If the channel is already connected, an error message will be printed to the error log. * If the channel is closed a ChannelClosed exception will be thrown. * This method starts the protocol stack by calling ProtocolStack.start, * then it sends an Event.CONNECT event down the stack and waits for the return value. * Once the call returns, the channel listeners are notified and the channel is considered connected. * * @param cluster_name A <code>String</code> denoting the group name. Cannot be null. * @exception ChannelException The protocol stack cannot be started * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer. * A new channel has to be created first. 
 */
@ManagedOperation(description="Connects the channel to a group")
public synchronized void connect(String cluster_name, boolean useFlushIfPresent) throws ChannelException {
    // idempotent: a second connect() to the same (or any) cluster is a no-op
    if(connected) {
        if(log.isTraceEnabled()) log.trace("already connected to " + cluster_name);
        return;
    }

    setAddress();
    startStack(cluster_name);

    if(cluster_name != null) {    // only connect if we are not a unicast channel
        Event connect_event = null;
        if (useFlushIfPresent) {
            connect_event = new Event(Event.CONNECT_USE_FLUSH, cluster_name);
        } else {
            connect_event = new Event(Event.CONNECT, cluster_name);
        }
        Object res=downcall(connect_event);  // waits forever until connected (or channel is closed)
        if(res != null && res instanceof Exception) { // the JOIN was rejected by the coordinator
            // roll back: tear the stack down and reset channel state before propagating
            stopStack(true, false);
            init();
            throw new ChannelException("connect() failed", (Throwable)res);
        }

        //if FLUSH is used do not return from connect() until UNBLOCK event is received
        if(flushSupported()) {
            try {
                flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
            }
            catch (TimeoutException timeout) {
                // best effort: proceed even if UNBLOCK never arrived in time
                if(log.isWarnEnabled()) log.warn(local_addr + " waiting on UNBLOCK after connect() timed out");
            }
        }
    }
    connected=true;
    notifyChannelConnected(this);
}

/**
 * Connects this channel to a group and gets a state from a specified state provider.
 * <p>
 * This method essentially invokes
 * <code>connect<code> and <code>getState<code> methods successively.
 * If FLUSH protocol is in channel's stack definition only one flush is executed for both connecting and
 * fetching state rather than two flushes if we invoke <code>connect<code> and <code>getState<code> in succesion.
 * <p>
 * If the channel is already connected, an error message will be printed to the error log.
 * If the channel is closed a ChannelClosed exception will be thrown.
 *
 * @param cluster_name the cluster name to connect to. Cannot be null.
 * @param target the state provider. If null state will be fetched from coordinator, unless this channel is coordinator.
 * @param state_id the substate id for partial state transfer. If null entire state will be transferred.
 * @param timeout the timeout for state transfer.
 *
 * @exception ChannelException The protocol stack cannot be started
 * @exception ChannelException Connecting to cluster was not successful
 * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
 *                                   A new channel has to be created first.
 * @exception StateTransferException State transfer was not successful
 */
public synchronized void connect(String cluster_name,
                                 Address target,
                                 String state_id,
                                 long timeout) throws ChannelException {
    // delegate to the flush-aware variant, using FLUSH by default when present
    connect(cluster_name, target, state_id, timeout,true);
}

/**
 * Connects this channel to a group and gets a state from a specified state provider.
 * <p>
 * This method essentially invokes
 * <code>connect<code> and <code>getState<code> methods successively.
 * If FLUSH protocol is in channel's stack definition only one flush is executed for both connecting and
 * fetching state rather than two flushes if we invoke <code>connect<code> and <code>getState<code> in succesion.
 * <p>
 * If the channel is already connected, an error message will be printed to the error log.
 * If the channel is closed a ChannelClosed exception will be thrown.
 *
 * @param cluster_name the cluster name to connect to. Cannot be null.
 * @param target the state provider. If null state will be fetched from coordinator, unless this channel is coordinator.
 * @param state_id the substate id for partial state transfer. If null entire state will be transferred.
 * @param timeout the timeout for state transfer.
 * @param useFlushIfPresent whether a single FLUSH phase should cover both the join and the state fetch
 *
 * @exception ChannelException The protocol stack cannot be started
 * @exception ChannelException Connecting to cluster was not successful
 * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
 *                                   A new channel has to be created first.
 * @exception StateTransferException State transfer was not successful
 */
public synchronized void connect(String cluster_name,
                                 Address target,
                                 String state_id,
                                 long timeout,
                                 boolean useFlushIfPresent) throws ChannelException {
    if(connected) {
        if(log.isTraceEnabled()) log.trace("already connected to " + cluster_name);
        return;
    }

    setAddress();
    startStack(cluster_name);

    boolean stateTransferOk=false;
    boolean joinSuccessful=false;
    boolean canFetchState=false;
    // only connect if we are not a unicast channel
    if(cluster_name == null) return;
    try {
        Event connect_event=null;
        if(useFlushIfPresent)
            connect_event=new Event(Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH, cluster_name);
        else
            connect_event=new Event(Event.CONNECT_WITH_STATE_TRANSFER, cluster_name);

        Object res=downcall(connect_event); // waits forever until connected (or channel is closed)
        joinSuccessful=!(res != null && res instanceof Exception);
        if(!joinSuccessful) {
            // roll back the partially-started stack before propagating the join failure
            stopStack(true, false);
            init();
            throw new ChannelException("connect() failed", (Throwable)res);
        }

        connected=true;
        notifyChannelConnected(this);

        canFetchState=getView() != null && getView().size() > 1;
        // if I am not the only member in cluster then
        if(canFetchState) {
            try {
                // fetch state from target; 'false' = do not start a second flush, the
                // connect's flush (if any) is still covering us
                stateTransferOk=getState(target, state_id, timeout, false);
                if(!stateTransferOk) {
                    throw new StateTransferException(getAddress() + " could not fetch state "
                            + (state_id == null ? "(full)" : state_id)
                            + " from "
                            + (target == null ? "(all)" : target));
                }
            }
            catch(Exception e) {
                throw new StateTransferException(getAddress() + " could not fetch state "
                        + (state_id == null ? "(full)" : state_id)
                        + " from "
                        + (target == null ? "(all)" : target), e);
            }
        }
    }
    finally {
        // release the flush that covered both join and state fetch
        if(flushSupported())
            stopFlush();
    }
}

/**
 * Disconnects the channel if it is connected.
If the channel is closed, * this operation is ignored<BR> * Otherwise the following actions happen in the listed order<BR> * <ol> * <li> The JChannel sends a DISCONNECT event down the protocol stack<BR> * <li> Blocks until the event has returned<BR> * <li> Sends a STOP_QUEING event down the stack<BR> * <li> Stops the protocol stack by calling ProtocolStack.stop()<BR> * <li> Notifies the listener, if the listener is available<BR> * </ol> */ @ManagedOperation(description="Disconnects the channel if it is connected") public synchronized void disconnect() { if(closed) return; if(connected) { if(cluster_name != null) { // Send down a DISCONNECT event, which travels down to the GMS, where a response is returned Event disconnect_event=new Event(Event.DISCONNECT, local_addr); down(disconnect_event); // DISCONNECT is handled by each layer } connected=false; stopStack(true, false); notifyChannelDisconnected(this); init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining } } /** * Destroys the channel. * After this method has been called, the channel us unusable.<BR> * This operation will disconnect the channel and close the channel receive queue immediately<BR> */ @ManagedOperation(description="Disconnects and destroys the channel") public synchronized void close() { _close(true, true); // by default disconnect before closing channel and close mq } /** * Shuts down a channel without disconnecting. To be used by tests only, don't use for application purposes */ @ManagedOperation(description="Shuts down the channel without disconnecting") public synchronized void shutdown() { down(new Event(Event.SHUTDOWN)); _close(false, true); // by default disconnect before closing channel and close mq } /** * Opens the channel. Note that the channel is only open, but <em>not connected</em>. 
* This does the following actions: * <ol> * <li> Resets the receiver queue by calling Queue.reset * <li> Sets up the protocol stack by calling ProtocolStack.setup * <li> Sets the closed flag to false * </ol> * @deprecated With the removal of shunning, this method should not be used anymore */ @Deprecated public synchronized void open() throws ChannelException { if(!closed) throw new ChannelException("channel is already open"); try { mq.reset(); String props=getProperties(); // new stack is created on open() - bela June 12 2003 prot_stack=new ProtocolStack(this, props); prot_stack.setup(); closed=false; } catch(Exception e) { throw new ChannelException("failed to open channel" , e); } } /** * returns true if the Open operation has been called successfully */ @ManagedAttribute public boolean isOpen() { return !closed; } /** * returns true if the Connect operation has been called successfully */ @ManagedAttribute public boolean isConnected() { return connected; } @ManagedAttribute public int getNumMessages() { return mq.size(); } @ManagedOperation public String dumpQueue() { return Util.dumpQueue(mq); } /** * Returns a map of statistics of the various protocols and of the channel itself. * @return Map<String,Map>. A map where the keys are the protocols ("channel" pseudo key is * used for the channel itself") and the values are property maps. 
*/ @ManagedOperation public Map<String,Object> dumpStats() { Map<String,Object> retval=prot_stack.dumpStats(); if(retval != null) { Map<String,Long> tmp=dumpChannelStats(); if(tmp != null) retval.put("channel", tmp); } return retval; } @ManagedOperation public Map<String,Object> dumpStats(String protocol_name) { return prot_stack.dumpStats(protocol_name); } protected Map<String,Long> dumpChannelStats() { Map<String,Long> retval=new HashMap<String,Long>(); retval.put("sent_msgs", new Long(sent_msgs)); retval.put("sent_bytes", new Long(sent_bytes)); retval.put("received_msgs", new Long(received_msgs)); retval.put("received_bytes", new Long(received_bytes)); return retval; } /** * Sends a message through the protocol stack. * Implements the Transport interface. * * @param msg the message to be sent through the protocol stack, * the destination of the message is specified inside the message itself * @exception ChannelNotConnectedException * @exception ChannelClosedException */ @ManagedOperation public void send(Message msg) throws ChannelNotConnectedException, ChannelClosedException { checkClosedOrNotConnected(); if(msg == null) throw new NullPointerException("msg is null"); if(stats) { sent_msgs++; sent_bytes+=msg.getLength(); } down(new Event(Event.MSG, msg)); } /** * creates a new message with the destination address, and the source address * and the object as the message value * @param dst - the destination address of the message, null for all members * @param src - the source address of the message * @param obj - the value of the message * @exception ChannelNotConnectedException * @exception ChannelClosedException * @see JChannel#send */ @ManagedOperation public void send(Address dst, Address src, Serializable obj) throws ChannelNotConnectedException, ChannelClosedException { send(new Message(dst, src, obj)); } /** * Blocking receive method. * This method returns the object that was first received by this JChannel and that has not been * received before. 
 * After the object is received, it is removed from the receive queue.<BR>
 * If you only want to inspect the object received without removing it from the queue call
 * JChannel.peek<BR>
 * If no messages are in the receive queue, this method blocks until a message is added or the operation times out<BR>
 * By specifying a timeout of 0, the operation blocks forever, or until a message has been received.
 * @param timeout the number of milliseconds to wait if the receive queue is empty. 0 means wait forever
 * @exception TimeoutException if a timeout occured prior to a new message was received
 * @exception ChannelNotConnectedException
 * @exception ChannelClosedException
 * @see JChannel#peek
 * @deprecated Use a {@link Receiver} instead
 */
public Object receive(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException {
    checkClosedOrNotConnected();

    try {
        Event evt=(timeout <= 0)? (Event)mq.remove() : (Event)mq.remove(timeout);
        Object retval=getEvent(evt);
        evt=null;
        return retval;
    }
    catch(QueueClosedException queue_closed) {
        // NOTE(review): the QueueClosedException cause is dropped here; consider chaining it
        throw new ChannelClosedException();
    }
    catch(TimeoutException t) {
        throw t;
    }
    catch(Exception e) {
        // unexpected failures are logged and mapped to a null return rather than rethrown
        if(log.isErrorEnabled()) log.error("exception: " + e);
        return null;
    }
}


/**
 * Just peeks at the next message, view or block. Does <em>not</em> install
 * new view if view is received<BR>
 * Does the same thing as JChannel.receive but doesn't remove the object from the
 * receiver queue
 *
 * @deprecated Use a {@link Receiver} instead
 */
public Object peek(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException {
    checkClosedOrNotConnected();

    try {
        Event evt=(timeout <= 0)? (Event)mq.peek() : (Event)mq.peek(timeout);
        Object retval=getEvent(evt);
        evt=null;
        return retval;
    }
    catch(QueueClosedException queue_closed) {
        if(log.isErrorEnabled()) log.error("exception: " + queue_closed);
        return null;
    }
    catch(TimeoutException t) {
        // unlike receive(), a timeout here is swallowed and reported as null
        return null;
    }
    catch(Exception e) {
        if(log.isErrorEnabled()) log.error("exception: " + e);
        return null;
    }
}




/**
 * Returns the current view.
 * <BR>
 * If the channel is not connected or if it is closed it will return null.
 * <BR>
 * @return returns the current group view, or null if the channel is closed or disconnected
 */
public View getView() {
    return closed || !connected ? null : my_view;
}

@ManagedAttribute(name="View")
public String getViewAsString() {
    View v=getView();
    return v != null ? v.toString() : "n/a";
}

@ManagedAttribute
public static String getVersion() {
    return Version.printDescription();
}

/** @see #getAddress() */
public Address getLocalAddress() {
    return getAddress();
}

/**
 * Returns the local address of the channel (null if the channel is closed)
 */
public Address getAddress() {
    return closed ? null : local_addr;
}

@ManagedAttribute(name="Address")
public String getAddressAsString() {
    return local_addr != null? local_addr.toString() : "n/a";
}

@ManagedAttribute(name="Address (UUID)")
public String getAddressAsUUID() {
    return local_addr != null? local_addr.toStringLong() : null;
}

/** Returns the logical name of this channel (may be null if never set). */
public String getName() {
    return name;
}

/**
 * Sets the logical name for the channel. The name will stay associated with this channel for the channel's
 * lifetime (until close() is called). This method should be called <em>before</em> calling connect().<br/>
 * @param name
 */
@ManagedAttribute(writable=true, description="The logical name of this channel. Stays with the channel until " +
        "the channel is closed")
public void setName(String name) {
    if(name != null) {
        this.name=name;
        if(local_addr != null) {
            // register the logical name in the global UUID-to-name cache
            UUID.add(local_addr, this.name);
        }
    }
}

/**
 * returns the name of the channel
 * if the channel is not connected or if it is closed it will return null
 * @deprecated Use {@link #getClusterName()} instead
 */
public String getChannelName() {
    return closed ? null : !connected ? null : cluster_name;
}

@ManagedAttribute(description="Returns cluster name this channel is connected to")
public String getClusterName() {
    return closed ? null : !connected ? null : cluster_name;
}


/**
 * Sets a channel option. The options can be one of the following:
 * <UL>
 * <LI> Channel.BLOCK
 * <LI> Channel.LOCAL
 * <LI> Channel.AUTO_RECONNECT
 * <LI> Channel.AUTO_GETSTATE
 * </UL>
 * <P>
 * Only BLOCK and LOCAL are still honored; VIEW, SUSPECT, GET_STATE_EVENTS,
 * AUTO_RECONNECT and AUTO_GETSTATE are deprecated and merely logged.
 * <P>
 * Option: Channel.BLOCK<BR>
 * Value: java.lang.Boolean<BR>
 * Result: set to true will set setOpt(VIEW, true) and the JChannel will receive BLOCKS and VIEW events<BR>
 * <BR>
 * Option: LOCAL<BR>
 * Value: java.lang.Boolean<BR>
 * Result: set to true the JChannel will receive messages that it self sent out.<BR>
 *
 * @param option the parameter option Channel.VIEW, Channel.SUSPECT, etc
 * @param value the value to set for this option
 */
public void setOpt(int option, Object value) {
    if(closed) {
        if(log.isWarnEnabled()) log.warn("channel is closed; option not set !");
        return;
    }

    switch(option) {
        case VIEW:
            if(log.isWarnEnabled())
                log.warn("option VIEW has been deprecated (it is always true now); this option is ignored");
            break;
        case SUSPECT:
            if(log.isWarnEnabled())
                log.warn("option SUSPECT has been deprecated (it is always true now); this option is ignored");
            break;
        case BLOCK:
            if(value instanceof Boolean)
                receive_blocks=((Boolean)value).booleanValue();
            else
                if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " (" + value + "): value has to be Boolean");
            break;
        case GET_STATE_EVENTS:
            if(log.isWarnEnabled())
                log.warn("option GET_STATE_EVENTS has been deprecated (it is always true now); this option is ignored");
            break;
        case LOCAL:
            if(value instanceof Boolean)
                receive_local_msgs=((Boolean)value).booleanValue();
            else
                if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " (" + value + "): value has to be Boolean");
            break;
        case AUTO_RECONNECT:
            if(log.isWarnEnabled())
                log.warn("Option AUTO_RECONNECT has been deprecated and is ignored");
            break;
        case AUTO_GETSTATE:
            if(log.isWarnEnabled())
                log.warn("Option AUTO_GETSTATE has been deprecated and is ignored");
            break;
        default:
            if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known");
            break;
    }
}


/**
 * returns the value of an option.
 * @param option the option you want to see the value for
 * @return the object value, in most cases java.lang.Boolean
 * @see JChannel#setOpt
 */
public Object getOpt(int option) {
    switch(option) {
        case VIEW:
            return Boolean.TRUE;
        case BLOCK:
            return receive_blocks ? Boolean.TRUE : Boolean.FALSE;
        case SUSPECT:
            return Boolean.TRUE;
        case AUTO_RECONNECT:
            // deprecated options always report false (autoboxed)
            return false;
        case AUTO_GETSTATE:
            return false;
        case GET_STATE_EVENTS:
            return Boolean.TRUE;
        case LOCAL:
            return receive_local_msgs ? Boolean.TRUE : Boolean.FALSE;
        default:
            if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known");
            return null;
    }
}


/**
 * Called to acknowledge a block() (callback in <code>MembershipListener</code> or
 * <code>BlockEvent</code> received from call to <code>receive()</code>).
 * After sending blockOk(), no messages should be sent until a new view has been received.
 * Calling this method on a closed channel has no effect.
 */
public void blockOk() {
    // intentionally a no-op: blocking is handled by the FLUSH protocol, not by explicit acks
}


/**
 * Retrieves a full state from the target member.
 * <p>
 * State transfer is initiated by invoking getState on this channel, state
 * receiver, and sending a GET_STATE message to a target member - state
 * provider. State provider passes GET_STATE message to application that is
 * using the state provider channel which in turn provides an application
 * state to a state receiver. Upon successful installation of a state at
 * state receiver this method returns true.
 *
 * @param target State provider. If null, coordinator is used
 * @param timeout the number of milliseconds to wait for the operation to
 *                complete successfully. 0 waits until the state has been received
 *
 * @see ExtendedMessageListener#getState(OutputStream)
 * @see ExtendedMessageListener#setState(InputStream)
 * @see MessageListener#getState()
 * @see MessageListener#setState(byte[])
 *
 * @return true if state transfer was successful, false otherwise
 * @throws ChannelNotConnectedException if channel was not connected at the time state retrieval was initiated
 * @throws ChannelClosedException if channel was closed at the time state retrieval was initiated
 * @throws IllegalStateException if one of state transfer protocols is not present in this channel
 * @throws IllegalStateException if flush is used in this channel and cluster could not be flushed
 */
public boolean getState(Address target, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
    // full (non-partial) state: no substate id
    return getState(target,null,timeout);
}

/**
 * Retrieves a substate (or partial state) indicated by state_id from the target member.
 * <p>
 * State transfer is initiated by invoking getState on this channel, state
 * receiver, and sending a GET_STATE message to a target member - state
 * provider. State provider passes GET_STATE message to application that is
 * using the state provider channel which in turn provides an application
 * state to a state receiver. Upon successful installation of a state at
 * state receiver this method returns true.
 *
 * @param target State provider. If null, coordinator is used
 * @param state_id The ID of the substate. If null, the entire state will be transferred
 * @param timeout the number of milliseconds to wait for the operation to
 *                complete successfully. 0 waits until the state has been received
 *
 * @see ExtendedMessageListener#getState(OutputStream)
 * @see ExtendedMessageListener#setState(InputStream)
 * @see MessageListener#getState()
 * @see MessageListener#setState(byte[])
 *
 * @return true if state transfer was successful, false otherwise
 * @throws ChannelNotConnectedException if channel was not connected at the time state retrieval was initiated
 * @throws ChannelClosedException if channel was closed at the time state retrieval was initiated
 * @throws IllegalStateException if one of state transfer protocols is not present in this channel
 * @throws IllegalStateException if flush is used in this channel and cluster could not be flushed
 */
public boolean getState(Address target, String state_id, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
    // flush by default when the stack supports it
    return getState(target, state_id, timeout, true);
}

/**
 * Retrieves a substate (or partial state) indicated by state_id from the target member.
 * <p>
 * State transfer is initiated by invoking getState on this channel, state
 * receiver, and sending a GET_STATE message to a target member - state
 * provider. State provider passes GET_STATE message to application that is
 * using the state provider channel which in turn provides an application
 * state to a state receiver. Upon successful installation of a state at
 * state receiver this method returns true.
 *
 * @param target State provider. If null, coordinator is used
 * @param state_id The ID of the substate. If null, the entire state will be transferred
 * @param timeout the number of milliseconds to wait for the operation to
 *                complete successfully. 0 waits until the state has been received
 * @param useFlushIfPresent whether channel should be flushed prior to state retrieval
 *
 * @see ExtendedMessageListener#getState(OutputStream)
 * @see ExtendedMessageListener#setState(InputStream)
 * @see MessageListener#getState()
 * @see MessageListener#setState(byte[])
 *
 * @return true if state transfer was successful, false otherwise
 * @throws ChannelNotConnectedException if channel was not connected at the time state retrieval was initiated
 * @throws ChannelClosedException if channel was closed at the time state retrieval was initiated
 * @throws IllegalStateException if one of state transfer protocols is not present in this channel
 * @throws IllegalStateException if flush is used in this channel and cluster could not be flushed
 */
public boolean getState(Address target, String state_id, long timeout, boolean useFlushIfPresent) throws ChannelNotConnectedException, ChannelClosedException {
    // the flush algorithm is passed as a Callable so connect() can supply a no-op variant
    Callable<Boolean> flusher = new Callable<Boolean>() {
        public Boolean call() throws Exception {
            return Util.startFlush(JChannel.this);
        }
    };
    return getState(target, state_id, timeout, useFlushIfPresent?flusher:null);
}

/**
 * Retrieves a substate (or partial state) indicated by state_id from the target member.
 * <p>
 * State transfer is initiated by invoking getState on this channel, state
 * receiver, and sending a GET_STATE message to a target member - state
 * provider. State provider passes GET_STATE message to application that is
 * using the state provider channel which in turn provides an application
 * state to a state receiver. Upon successful installation of a state at
 * state receiver this method returns true.
 *
 * @param target State provider. If null, coordinator is used
 * @param state_id The ID of the substate. If null, the entire state will be transferred
 * @param timeout the number of milliseconds to wait for the operation to
 *                complete successfully. 0 waits until the state has been received
 * @param flushInvoker algorithm invoking flush; null disables flushing entirely
 *
 * @see ExtendedMessageListener#getState(OutputStream)
 * @see ExtendedMessageListener#setState(InputStream)
 * @see MessageListener#getState()
 * @see MessageListener#setState(byte[])
 *
 * @return true if state transfer was successful, false otherwise
 * @throws ChannelNotConnectedException if channel was not connected at the time state retrieval was initiated
 * @throws ChannelClosedException if channel was closed at the time state retrieval was initiated
 * @throws IllegalStateException if one of state transfer protocols is not present in this channel
 * @throws IllegalStateException if flush is used in this channel and cluster could not be flushed
 */
protected boolean getState(Address target, String state_id, long timeout,Callable<Boolean> flushInvoker) throws ChannelNotConnectedException, ChannelClosedException {
    checkClosedOrNotConnected();
    if(!state_transfer_supported) {
        throw new IllegalStateException("fetching state will fail as state transfer is not supported. "
                + "Add one of the STATE_TRANSFER protocols to your protocol configuration");
    }

    if(target == null)
        target=determineCoordinator();
    if(target != null && local_addr != null && target.equals(local_addr)) {
        // a node cannot fetch state from itself; typically it is the first member
        if(log.isTraceEnabled())
            log.trace("cannot get state from myself (" + target + "): probably the first member");
        return false;
    }

    boolean initiateFlush = flushSupported() && flushInvoker!=null;

    if (initiateFlush) {
        boolean successfulFlush = false;
        try {
            successfulFlush = flushInvoker.call();
        } catch (Exception e) {
            successfulFlush = false; // http://jira.jboss.com/jira/browse/JGRP-759
        } finally {
            // NOTE(review): throwing from finally here masks any exception raised by call();
            // the JGRP-759 workaround above deliberately folds it into successfulFlush
            if (!successfulFlush) {
                throw new IllegalStateException("Node "+ local_addr+ " could not flush the cluster for state retrieval");
            }
        }
    }

    state_promise.reset();
    StateTransferInfo state_info=new StateTransferInfo(target, state_id, timeout);
    down(new Event(Event.GET_STATE, state_info));
    // blocks until the state arrived (up() fulfills the promise) or the timeout elapsed
    Boolean b=state_promise.getResult(state_info.timeout);

    if(initiateFlush)
        stopFlush();

    boolean state_transfer_successfull = b != null && b.booleanValue();
    if(!state_transfer_successfull)
        // let STABLE resume message garbage collection that was suspended for the transfer
        down(new Event(Event.RESUME_STABLE));
    return state_transfer_successfull;
}

/**
 * Retrieves the current group state. Sends GET_STATE event down to STATE_TRANSFER layer.
 * Blocks until STATE_TRANSFER sends up a GET_STATE_OK event or until <code>timeout</code>
 * milliseconds have elapsed. The argument of GET_STATE_OK should be a vector of objects.
 * <p>
 * Note: this operation is no longer implemented and always throws
 * {@link UnsupportedOperationException}.
 * @param targets - the target members to receive the state from ( an Address list )
 * @param timeout - the number of milliseconds to wait for the operation to complete successfully
 * @return true of the state was received, false if the operation timed out
 * @deprecated Not really needed - we always want to get the state from a single member,
 * use {@link #getState(org.jgroups.Address, long)} instead
 */
public boolean getAllStates(Vector targets, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
    throw new UnsupportedOperationException("use getState() instead");
}


/**
 * Called by the application is response to receiving a <code>getState()</code> object when
 * calling <code>receive()</code>.
 * When the application receives a getState() message on the receive() method,
 * it should call returnState() to reply with the state of the application
 * @param state The state of the application as a byte buffer
 *              (to send over the network).
 */
public void returnState(byte[] state) {
    try {
        StateTransferInfo state_info=new StateTransferInfo(null, null, 0L, state);
        // hand the state to the thread blocked in the GET_APPLSTATE exchange
        applstate_exchanger.exchange(state_info);
    }
    catch(InterruptedException e) {
        // restore the interrupt status for callers higher up the stack
        Thread.currentThread().interrupt();
    }
}

/**
 * Returns a substate as indicated by state_id
 * @param state the application substate as a byte buffer
 * @param state_id the id of the substate being returned
 */
public void returnState(byte[] state, String state_id) {
    try {
        StateTransferInfo state_info=new StateTransferInfo(null, state_id, 0L, state);
        applstate_exchanger.exchange(state_info);
    }
    catch(InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}




/**
 * Callback method <BR>
 * Called by the ProtocolStack when a message is received.
 * It will be added to the message queue from which subsequent
 * <code>Receive</code>s will dequeue it.
* @param evt the event carrying the message from the protocol stack */ public Object up(Event evt) { int type=evt.getType(); Message msg; switch(type) { case Event.MSG: msg=(Message)evt.getArg(); if(stats) { received_msgs++; received_bytes+=msg.getLength(); } if(!receive_local_msgs) { // discard local messages (sent by myself to me) if(local_addr != null && msg.getSrc() != null) if(local_addr.equals(msg.getSrc())) return null; } break; case Event.VIEW_CHANGE: View tmp=(View)evt.getArg(); if(tmp instanceof MergeView) my_view=new View(tmp.getVid(), tmp.getMembers()); else my_view=tmp; /* * Bela&Vladimir Oct 27th,2006 (JGroups 2.4)- we need to switch to * connected=true because client can invoke channel.getView() in * viewAccepted() callback invoked on this thread * (see Event.VIEW_CHANGE handling below) */ // not good: we are only connected when we returned from connect() - bela June 22 2007 // if(connected == false) { // connected=true; // } break; case Event.CONFIG: Map<String,Object> cfg=(Map<String,Object>)evt.getArg(); if(cfg != null) { if(cfg.containsKey("state_transfer")) { state_transfer_supported=((Boolean)cfg.get("state_transfer")).booleanValue(); } if(cfg.containsKey("flush_supported")) { flush_supported=((Boolean)cfg.get("flush_supported")).booleanValue(); } cfg.putAll(cfg); } break; case Event.GET_STATE_OK: StateTransferInfo state_info=(StateTransferInfo)evt.getArg(); byte[] state=state_info.state; try { if(up_handler != null) { return up_handler.up(evt); } if(state != null) { String state_id=state_info.state_id; if(receiver != null) { try { if(receiver instanceof ExtendedReceiver && state_id != null) ((ExtendedReceiver)receiver).setState(state_id, state); else receiver.setState(state); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling setState() in receiver", t); } } else { try { mq.add(new Event(Event.STATE_RECEIVED, state_info)); } catch(Exception e) { } } } } finally { state_promise.setResult(state != null? 
Boolean.TRUE : Boolean.FALSE); } break; case Event.STATE_TRANSFER_INPUTSTREAM_CLOSED: state_promise.setResult(Boolean.TRUE); break; case Event.STATE_TRANSFER_INPUTSTREAM: StateTransferInfo sti=(StateTransferInfo)evt.getArg(); InputStream is=sti.inputStream; //Oct 13,2006 moved to down() when Event.STATE_TRANSFER_INPUTSTREAM_CLOSED is received //state_promise.setResult(is != null? Boolean.TRUE : Boolean.FALSE); if(up_handler != null) { return up_handler.up(evt); } if(is != null) { if(receiver instanceof ExtendedReceiver) { try { if(sti.state_id == null) ((ExtendedReceiver)receiver).setState(is); else ((ExtendedReceiver)receiver).setState(sti.state_id, is); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling setState() in receiver", t); } } else if(receiver instanceof Receiver){ if(log.isWarnEnabled()){ log.warn("Channel has STREAMING_STATE_TRANSFER, however," + " application does not implement ExtendedMessageListener. State is not transfered"); Util.close(is); } } else { try { mq.add(new Event(Event.STATE_TRANSFER_INPUTSTREAM, sti)); } catch(Exception e) { } } } break; case Event.GET_LOCAL_ADDRESS: return local_addr; default: break; } // If UpHandler is installed, pass all events to it and return (UpHandler is e.g. 
a building block) if(up_handler != null) { Object ret=up_handler.up(evt); if(type == Event.UNBLOCK){ flush_unblock_promise.setResult(Boolean.TRUE); } return ret; } switch(type) { case Event.MSG: if(receiver != null) { try { receiver.receive((Message)evt.getArg()); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling receive() in receiver", t); } return null; } break; case Event.VIEW_CHANGE: if(receiver != null) { try { receiver.viewAccepted((View)evt.getArg()); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling viewAccepted() in receiver", t); } return null; } break; case Event.SUSPECT: if(receiver != null) { try { receiver.suspect((Address)evt.getArg()); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling suspect() in receiver", t); } return null; } break; case Event.GET_APPLSTATE: if(receiver != null) { StateTransferInfo state_info=(StateTransferInfo)evt.getArg(); byte[] tmp_state=null; String state_id=state_info.state_id; try { if(receiver instanceof ExtendedReceiver && state_id!=null) { tmp_state=((ExtendedReceiver)receiver).getState(state_id); } else { tmp_state=receiver.getState(); } } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling getState() in receiver", t); } return new StateTransferInfo(null, state_id, 0L, tmp_state); } break; case Event.STATE_TRANSFER_OUTPUTSTREAM: StateTransferInfo sti=(StateTransferInfo)evt.getArg(); OutputStream os=sti.outputStream; if(receiver instanceof ExtendedReceiver) { if(os != null) { try { if(sti.state_id == null) ((ExtendedReceiver)receiver).getState(os); else ((ExtendedReceiver)receiver).getState(sti.state_id, os); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling getState() in receiver", t); } } } else if(receiver instanceof Receiver){ if(log.isWarnEnabled()){ log.warn("Channel has STREAMING_STATE_TRANSFER, however," + " application does not implement ExtendedMessageListener. 
State is not transfered"); Util.close(os); } } break; case Event.BLOCK: if(!receive_blocks) { // discard if client has not set 'receiving blocks' to 'on' return Boolean.TRUE; } if(receiver != null) { try { receiver.block(); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed calling block() in receiver", t); } return Boolean.TRUE; } break; case Event.UNBLOCK: //invoke receiver if block receiving is on if(receive_blocks && receiver instanceof ExtendedReceiver) { try { ((ExtendedReceiver)receiver).unblock(); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed calling unblock() in receiver", t); } } //flip promise flush_unblock_promise.setResult(Boolean.TRUE); return null; default: break; } if(type == Event.MSG || type == Event.VIEW_CHANGE || type == Event.SUSPECT || type == Event.GET_APPLSTATE || type== Event.STATE_TRANSFER_OUTPUTSTREAM || type == Event.BLOCK || type == Event.UNBLOCK) { try { mq.add(evt); } catch(QueueClosedException queue_closed) { ; // ignore } catch(Exception e) { if(log.isWarnEnabled()) log.warn("exception adding event " + evt + " to message queue", e); } } if(type == Event.GET_APPLSTATE) { try { return applstate_exchanger.exchange(null); } catch(InterruptedException e) { Thread.currentThread().interrupt(); return null; } } return null; } /** * Sends a message through the protocol stack if the stack is available * @param evt the message to send down, encapsulated in an event */ public void down(Event evt) { if(evt == null) return; switch(evt.getType()) { case Event.CONFIG: try { Map<String,Object> m=(Map<String,Object>)evt.getArg(); if(m != null) { additional_data.putAll(m); if(m.containsKey("additional_data")) { byte[] tmp=(byte[])m.get("additional_data"); if(local_addr != null) local_addr.setAdditionalData(tmp); } } } catch(Throwable t) { if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t); } break; } prot_stack.down(evt); } public Object downcall(Event evt) { if(evt == null) return 
null; switch(evt.getType()) { case Event.CONFIG: try { Map<String,Object> m=(Map<String,Object>)evt.getArg(); if(m != null) { additional_data.putAll(m); if(m.containsKey("additional_data")) { byte[] tmp=(byte[])m.get("additional_data"); if(local_addr != null) local_addr.setAdditionalData(tmp); } } } catch(Throwable t) { if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t); } break; } return prot_stack.down(evt); } @ManagedOperation public String toString(boolean details) { StringBuilder sb=new StringBuilder(); sb.append("local_addr=").append(local_addr).append('\n'); sb.append("cluster_name=").append(cluster_name).append('\n'); sb.append("my_view=").append(my_view).append('\n'); sb.append("connected=").append(connected).append('\n'); sb.append("closed=").append(closed).append('\n'); sb.append("incoming queue size=").append(mq.size()).append('\n'); if(details) { sb.append("receive_blocks=").append(receive_blocks).append('\n'); sb.append("receive_local_msgs=").append(receive_local_msgs).append('\n'); sb.append("state_transfer_supported=").append(state_transfer_supported).append('\n'); sb.append("props=").append(getProperties()).append('\n'); } return sb.toString(); } /* ----------------------------------- Private Methods ------------------------------------- */ protected final void init(ProtocolStackConfigurator configurator) throws ChannelException { if(log.isInfoEnabled()) log.info("JGroups version: " + Version.description); // ConfiguratorFactory.substituteVariables(configurator); // replace vars with system props String tmp=configurator.getProtocolStackString(); tmp=Util.substituteVariable(tmp); // replace vars with system props synchronized(getClass()) { prot_stack=new ProtocolStack(this, tmp); try { prot_stack.setup(); // Setup protocol stack (creates protocol, calls init() on them) properties=tmp; } catch(Throwable e) { throw new ChannelException("unable to setup the protocol stack: " + e.getMessage(), e); } } } protected final 
void init(JChannel ch) throws ChannelException { if(ch == null) throw new IllegalArgumentException("channel is null"); if(log.isInfoEnabled()) log.info("JGroups version: " + Version.description); synchronized(getClass()) { prot_stack=new ProtocolStack(this, null); try { prot_stack.setup(ch.getProtocolStack()); // Setup protocol stack (creates protocol, calls init() on them) getProperties(); } catch(Throwable e) { throw new ChannelException("unable to setup the protocol stack: " + e.getMessage(), e); } } } /** * Initializes all variables. Used after <tt>close()</tt> or <tt>disconnect()</tt>, * to be ready for new <tt>connect()</tt> */ private void init() { if(local_addr != null) down(new Event(Event.REMOVE_ADDRESS, local_addr)); local_addr=null; cluster_name=null; my_view=null; // changed by Bela Sept 25 2003 //if(mq != null && mq.closed()) // mq.reset(); connected=false; } private void startStack(String cluster_name) throws ChannelException { /*make sure the channel is not closed*/ checkClosed(); /*make sure we have a valid channel name*/ if(cluster_name == null) { if(log.isDebugEnabled()) log.debug("cluster_name is null, assuming unicast channel"); } else this.cluster_name=cluster_name; try { prot_stack.startStack(cluster_name, local_addr); // calls start() in all protocols, from top to bottom } catch(Throwable e) { throw new ChannelException("failed to start protocol stack", e); } /*create a temporary view, assume this channel is the only member and is the coordinator*/ Vector<Address> t=new Vector<Address>(1); t.addElement(local_addr); my_view=new View(local_addr, 0, t); // create a dummy view TP transport=prot_stack.getTransport(); transport.registerProbeHandler(probe_handler); } /** * Generates new UUID and sets local address. 
 Sends down a REMOVE_ADDRESS (if existing address was present) and
 * a SET_LOCAL_ADDRESS
 */
private void setAddress() {
    UUID old_addr=local_addr;
    local_addr=UUID.randomUUID();

    // apply any "additional_data" payload accumulated from CONFIG events
    byte[] buf=(byte[])additional_data.get("additional_data");
    if(buf != null)
        local_addr.setAdditionalData(buf);

    if(old_addr != null)
        down(new Event(Event.REMOVE_ADDRESS, old_addr));
    if(name == null || name.length() == 0) // generate a logical name if not set
        name=Util.generateLocalName();
    if(name != null && name.length() > 0)
        UUID.add(local_addr, name);

    Event evt=new Event(Event.SET_LOCAL_ADDRESS, local_addr);
    down(evt);
    if(up_handler != null)
        up_handler.up(evt);
}


/**
 * health check<BR>
 * throws a ChannelClosed exception if the channel is closed
 */
protected void checkClosed() throws ChannelClosedException {
    if(closed)
        throw new ChannelClosedException();
}

/** Health check: rejects use of a closed or not-yet-connected channel. */
protected void checkClosedOrNotConnected() throws ChannelNotConnectedException, ChannelClosedException {
    if(closed)
        throw new ChannelClosedException();
    if(!connected)
        throw new ChannelNotConnectedException();
}


/**
 * returns the value of the event<BR>
 * These objects will be returned<BR>
 * <PRE>
 * <B>Event Type    - Return Type</B>
 * Event.MSG           - returns a Message object
 * Event.VIEW_CHANGE   - returns a View object
 * Event.SUSPECT       - returns a SuspectEvent object
 * Event.BLOCK         - returns a new BlockEvent object
 * Event.GET_APPLSTATE - returns a GetStateEvent object
 * Event.STATE_RECEIVED- returns a SetStateEvent object
 * Event.Exit          - returns an ExitEvent object
 * All other           - return the actual Event object
 * </PRE>
 * @param evt - the event of which you want to extract the value
 * @return the event value if it matches the select list,
 *         returns null if the event is null
 *         returns the event itself if a match (See above) can not be made of the event type
 */
static Object getEvent(Event evt) {
    if(evt == null)
        return null; // correct ?
    switch(evt.getType()) {
        case Event.MSG:
            return evt.getArg();
        case Event.VIEW_CHANGE:
            return evt.getArg();
        case Event.SUSPECT:
            return new SuspectEvent(evt.getArg());
        case Event.BLOCK:
            return new BlockEvent();
        case Event.UNBLOCK:
            return new UnblockEvent();
        case Event.GET_APPLSTATE:
            StateTransferInfo info=(StateTransferInfo)evt.getArg();
            return new GetStateEvent(info.target, info.state_id);
        case Event.STATE_RECEIVED:
            info=(StateTransferInfo)evt.getArg();
            return new SetStateEvent(info.state, info.state_id);
        case Event.STATE_TRANSFER_OUTPUTSTREAM:
            info = (StateTransferInfo)evt.getArg();
            return new StreamingGetStateEvent(info.outputStream,info.state_id);
        case Event.STATE_TRANSFER_INPUTSTREAM:
            info = (StateTransferInfo)evt.getArg();
            return new StreamingSetStateEvent(info.inputStream,info.state_id);
        default:
            return evt;
    }
}


/**
 * Disconnects and closes the channel.
 * This method does the following things
 * <ol>
 * <li>Calls <code>this.disconnect</code> if the disconnect parameter is true
 * <li>Calls <code>Queue.close</code> on mq if the close_mq parameter is true
 * <li>Calls <code>ProtocolStack.stop</code> on the protocol stack
 * <li>Calls <code>ProtocolStack.destroy</code> on the protocol stack
 * <li>Sets the channel closed and channel connected flags to true and false
 * <li>Notifies any channel listener of the channel close operation
 * </ol>
 */
protected void _close(boolean disconnect, boolean close_mq) {
    UUID old_addr=local_addr;
    if(closed)
        return;

    if(disconnect)
        disconnect();                     // leave group if connected

    if(close_mq)
        closeMessageQueue(false);

    stopStack(true, true);
    closed=true;
    connected=false;
    notifyChannelClosed(this);
    init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining
    if(old_addr != null)
        UUID.remove(old_addr);
}

/** Stops and/or destroys the protocol stack and unregisters the probe handler. */
protected void stopStack(boolean stop, boolean destroy) {
    if(prot_stack != null) {
        try {
            if(stop)
                prot_stack.stopStack(cluster_name);

            if(destroy)
                prot_stack.destroy();
        }
        catch(Exception e) {
            if(log.isErrorEnabled())
                log.error("failed destroying the protocol stack", e);
        }

        TP transport=prot_stack.getTransport();
        if(transport != null)
            transport.unregisterProbeHandler(probe_handler);
    }
}


public final void closeMessageQueue(boolean flush_entries) {
    mq.close(flush_entries);
}


public boolean flushSupported() {
    return flush_supported;
}

/**
 * Will perform a flush of the system, ie. all pending messages are flushed out of the
 * system and all members ack their reception. After this call returns, no member will
 * be sending any messages until {@link #stopFlush()} is called.
 * <p>
 * In case of flush collisions, random sleep time backoff algorithm is employed and
 * flush is reattempted for numberOfAttempts. Therefore this method is guaranteed
 * to return after timeout x numberOfAttempts miliseconds.
 *
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 */
public boolean startFlush(boolean automatic_resume) {
    if(!flushSupported()) {
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    boolean successfulFlush = (Boolean) downcall(new Event(Event.SUSPEND));

    if(automatic_resume)
        stopFlush();

    return successfulFlush;
}

/**
 * Performs a partial flush in a cluster for flush participants.
 * <p>
 * All pending messages are flushed out only for flush participants.
 * Remaining members in a cluster are not included in flush.
 * Flush participants should be a proper subset of a current view.
 *
 * <p>
 * In case of flush collisions, random sleep time backoff algorithm is employed and
 * flush is reattempted for numberOfAttempts. Therefore this method is guaranteed
 * to return after timeout x numberOfAttempts miliseconds.
 *
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 */
public boolean startFlush(List<Address> flushParticipants, boolean automatic_resume) {
    boolean successfulFlush = false;
    if(!flushSupported()){
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    // participants must all be members of the current view
    View v = getView();
    if(v != null && v.getMembers().containsAll(flushParticipants)){
        successfulFlush = (Boolean) downcall(new Event(Event.SUSPEND, flushParticipants));
    }else{
        throw new IllegalArgumentException("Current view " + v
                                           + " does not contain all flush participants "
                                           + flushParticipants);
    }

    if(automatic_resume)
        stopFlush(flushParticipants);

    return successfulFlush;
}

/**
 * Will perform a flush of the system, ie. all pending messages are flushed out of the
 * system and all members ack their reception. After this call returns, no member will
 * be sending any messages until {@link #stopFlush()} is called.
 * <p>
 * In case of flush collisions, random sleep time backoff algorithm is employed and
 * flush is reattempted for numberOfAttempts. Therefore this method is guaranteed
 * to return after timeout x numberOfAttempts miliseconds.
 * @param timeout
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 */
public boolean startFlush(long timeout, boolean automatic_resume) {
    // NOTE: the timeout parameter is ignored; this simply delegates to startFlush(automatic_resume)
    return startFlush(automatic_resume);
}

/** Resumes message sending after a flush; blocks until the UNBLOCK event arrives (bounded wait). */
public void stopFlush() {
    if(!flushSupported()) {
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    flush_unblock_promise.reset();
    down(new Event(Event.RESUME));

    //do not return until UNBLOCK event is received
    try {
        flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
    }
    catch(TimeoutException te) {
        log.warn("Timeout waiting for UNBLOCK event at " + getAddress());
    }
}

/** Resumes message sending for the given flush participants only. */
public void stopFlush(List<Address> flushParticipants) {
    if(!flushSupported()) {
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    flush_unblock_promise.reset();
    down(new Event(Event.RESUME, flushParticipants));

    // do not return until UNBLOCK event is received
    try {
        flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
    }
    catch(TimeoutException te) {
        log.warn("Timeout waiting for UNBLOCK event at " + getAddress());
    }
}

/** Returns a snapshot copy of the config map. */
@Override
public Map<String, Object> getInfo(){
    return new HashMap<String, Object>(config);
}

public void setInfo(String key, Object value) {
    if(key != null)
        config.put(key, value);
}

/** Returns the first member of the current view (the coordinator), or null. */
Address determineCoordinator() {
    Vector<Address> mbrs=my_view != null? my_view.getMembers() : null;
    if(mbrs == null)
        return null;
    if(!mbrs.isEmpty())
        return mbrs.firstElement();
    return null;
}

/** Returns the transport's timer, or null if no stack/transport is present. */
private TimeScheduler getTimer() {
    if(prot_stack != null) {
        TP transport=prot_stack.getTransport();
        if(transport != null) {
            return transport.getTimer();
        }
    }
    return null;
}

/* ------------------------------- End of Private Methods ---------------------------------- */

/** Answers diagnostic probes ("jmx", "info") sent through the transport. */
class MyProbeHandler implements TP.ProbeHandler {

    public Map<String, String> handleProbe(String... keys) {
        HashMap<String, String> map=new HashMap<String, String>(2);
        for(String key: keys) {
            if(key.startsWith("jmx")) {
                Map<String, Object> tmp_stats;
                int index=key.indexOf("=");
                if(index > -1) {
                    // "jmx=<proto>" dumps stats for one protocol only
                    String value=key.substring(index +1);
                    tmp_stats=dumpStats(value);
                }
                else
                    tmp_stats=dumpStats();
                map.put("jmx", tmp_stats != null? Util.mapToString(tmp_stats) : "null");
                continue;
            }
            if(key.equals("info")) {
                Map<String, Object> tmp_info=getInfo();
                map.put("info", tmp_info != null? Util.mapToString(tmp_info) : "null");
            }
        }

        map.put("version", Version.description + ", cvs=\"" + Version.cvs + "\"");
        if(my_view != null && !map.containsKey("view"))
            map.put("view", my_view.toString());
        map.put("local_addr", getAddressAsString() + " [" + getAddressAsUUID() + "]");
        map.put("cluster", getClusterName());
        return map;
    }

    public String[] supportedKeys() {
        return new String[]{"jmx", "info"};
    }
}

}
src/org/jgroups/JChannel.java
package org.jgroups;

import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.conf.ConfiguratorFactory;
import org.jgroups.conf.ProtocolStackConfigurator;
import org.jgroups.logging.Log;
import org.jgroups.logging.LogFactory;
import org.jgroups.protocols.TP;
import org.jgroups.stack.ProtocolStack;
import org.jgroups.stack.StateTransferInfo;
import org.jgroups.util.*;
import org.w3c.dom.Element;

import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Exchanger;

/**
 * JChannel is a pure Java implementation of Channel.
 * When a JChannel object is instantiated it automatically sets up the
 * protocol stack.
 * <p>
 * <B>Properties</B>
 * <P>
 * Properties are used to configure a channel, and are accepted in
 * several forms; the String form is described here.
 * A property string consists of a number of properties separated by
 * colons.  For example:
 * <p>
 * <pre>"&lt;prop1&gt;(arg1=val1):&lt;prop2&gt;(arg1=val1;arg2=val2):&lt;prop3&gt;:&lt;propn&gt;"</pre>
 * <p>
 * Each property relates directly to a protocol layer, which is
 * implemented as a Java class. When a protocol stack is to be created
 * based on the above property string, the first property becomes the
 * bottom-most layer, the second one will be placed on the first, etc.:
 * the stack is created from the bottom to the top, as the string is
 * parsed from left to right. Each property has to be the name of a
 * Java class that resides in the
 * {@link org.jgroups.protocols} package.
 * <p>
 * Note that only the base name has to be given, not the fully specified
 * class name (e.g., UDP instead of org.jgroups.protocols.UDP).
 * <p>
 * Each layer may have 0 or more arguments, which are specified as a
 * list of name/value pairs in parentheses directly after the property.
 * In the example above, the first protocol layer has 1 argument,
 * the second 2, the third none. When a layer is created, these
 * properties (if there are any) will be set in a layer by invoking
 * the layer's setProperties() method
 * <p>
 * As an example the property string below instructs JGroups to create
 * a JChannel with protocols UDP, PING, FD and GMS:<p>
 * <pre>"UDP(mcast_addr=228.10.9.8;mcast_port=5678):PING:FD:GMS"</pre>
 * <p>
 * The UDP protocol layer is at the bottom of the stack, and it
 * should use mcast address 228.10.9.8. and port 5678 rather than
 * the default IP multicast address and port. The only other argument
 * instructs FD to output debug information while executing.
 * Property UDP refers to a class {@link org.jgroups.protocols.UDP},
 * which is subsequently loaded and an instance of which is created as protocol layer.
 * If any of these classes are not found, an exception will be thrown and
 * the construction of the stack will be aborted.
 *
 * @author Bela Ban
 * @version $Id: JChannel.java,v 1.222 2009/08/19 06:14:17 belaban Exp $
 */
@MBean(description="JGroups channel")
public class JChannel extends Channel {

    /** The default protocol stack used by the default constructor  */
    public static final String DEFAULT_PROTOCOL_STACK="udp.xml";

    protected String properties=null;

    /*the address of this JChannel instance*/
    private UUID local_addr=null;

    private String name=null;

    /*the channel (also know as group) name*/
    private String cluster_name=null;  // group name

    /*the latest view of the group membership*/
    private View my_view=null;

    /*the queue that is used to receive messages (events) from the protocol stack*/
    private final Queue mq=new Queue();

    /*the protocol stack, used to send and receive messages from the protocol stack*/
    private ProtocolStack prot_stack=null;

    // completed when a requested state (or the lack of one) has been received
    private final Promise<Boolean> state_promise=new Promise<Boolean>();

    // hands the application's state (returnState()) to the GET_APPLSTATE handler in up()
    private final Exchanger<StateTransferInfo> applstate_exchanger=new Exchanger<StateTransferInfo>();

    private final Promise<Boolean> flush_unblock_promise=new Promise<Boolean>();

    /*if FLUSH is used channel waits for UNBLOCK event, this is the default timeout, 5 secs*/
    private static final long FLUSH_UNBLOCK_TIMEOUT=5000;

    /*flag to indicate whether to receive blocks, if this is set to true, receive_views is set to true*/
    @ManagedAttribute(description="Flag indicating whether to receive blocks",writable=true)
    private boolean receive_blocks=false;

    /*flag to indicate whether to receive local messages
     *if this is set to false, the JChannel will not receive messages sent by itself*/
    @ManagedAttribute(description="Flag indicating whether to receive this channel's own messages",writable=true)
    private boolean receive_local_msgs=true;

    /*channel connected flag*/
    protected boolean connected=false;

    /*channel closed flag*/
    protected boolean closed=false;      // close() has been called, channel is unusable

    /** True if a state transfer protocol is available, false otherwise */
    private boolean state_transfer_supported=false; // set by CONFIG event from STATE_TRANSFER protocol

    /** True if a flush protocol is available, false otherwise */
    private volatile boolean flush_supported=false; // set by CONFIG event from FLUSH protocol

    /** Provides storage for arbitrary objects. Protocols can send up CONFIG events, and all key-value pairs of
     * a CONFIG event will be added to additional_data. On reconnect, a CONFIG event will be sent down by the channel,
     * containing all key-value pairs of additional_data */
    protected final Map<String,Object> additional_data=new HashMap<String,Object>();

    protected final ConcurrentMap<String,Object> config=new ConcurrentHashMap<String,Object>();

    protected final Log log=LogFactory.getLog(getClass());

    /** Collect statistics */
    @ManagedAttribute(description="Collect channel statistics",writable=true)
    protected boolean stats=true;

    protected long sent_msgs=0, received_msgs=0, sent_bytes=0, received_bytes=0;

    private final TP.ProbeHandler probe_handler=new MyProbeHandler();


    /**
     * Used by subclass to create a JChannel without a protocol stack, don't use as application programmer
     * @deprecated Remove in 3.0
     */
    protected JChannel(boolean no_op) {
        ;
    }

    /**
     * Constructs a <code>JChannel</code> instance with the protocol stack
     * specified by the <code>DEFAULT_PROTOCOL_STACK</code> member.
     *
     * @throws ChannelException if problems occur during the initialization of
     *                          the protocol stack.
     */
    public JChannel() throws ChannelException {
        this(DEFAULT_PROTOCOL_STACK);
    }

    /**
     * Constructs a <code>JChannel</code> instance with the protocol stack
     * configuration contained by the specified file.
     *
     * @param properties a file containing a JGroups XML protocol stack
     *                   configuration.
     *
     * @throws ChannelException if problems occur during the configuration or
     *                          initialization of the protocol stack.
*/ public JChannel(File properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration contained by the specified XML element. * * @param properties a XML element containing a JGroups XML protocol stack * configuration. * * @throws ChannelException if problems occur during the configuration or * initialization of the protocol stack. */ public JChannel(Element properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration indicated by the specified URL. * * @param properties a URL pointing to a JGroups XML protocol stack * configuration. * * @throws ChannelException if problems occur during the configuration or * initialization of the protocol stack. */ public JChannel(URL properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration based upon the specified properties parameter. * * @param properties an old style property string, a string representing a * system resource containing a JGroups XML configuration, * a string representing a URL pointing to a JGroups XML * XML configuration, or a string representing a file name * that contains a JGroups XML configuration. * * @throws ChannelException if problems occur during the configuration and * initialization of the protocol stack. */ public JChannel(String properties) throws ChannelException { this(ConfiguratorFactory.getStackConfigurator(properties)); } /** * Constructs a <code>JChannel</code> instance with the protocol stack * configuration contained by the protocol stack configurator parameter. * <p> * All of the public constructors of this class eventually delegate to this * method. 
* * @param configurator a protocol stack configurator containing a JGroups * protocol stack configuration. * * @throws ChannelException if problems occur during the initialization of * the protocol stack. */ public JChannel(ProtocolStackConfigurator configurator) throws ChannelException { init(configurator); } /** * Creates a new JChannel with the protocol stack as defined in the properties * parameter. an example of this parameter is<BR> * "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:FLUSH:GMS:VIEW_ENFORCER:STATE_TRANSFER:QUEUE"<BR> * Other examples can be found in the ./conf directory<BR> * @param properties the protocol stack setup; if null, the default protocol stack will be used. * The properties can also be a java.net.URL object or a string that is a URL spec. * The JChannel will validate any URL object and String object to see if they are a URL. * In case of the parameter being a url, the JChannel will try to load the xml from there. * In case properties is a org.w3c.dom.Element, the ConfiguratorFactory will parse the * DOM tree with the element as its root element. * @deprecated Use the constructors with specific parameter types instead. */ public JChannel(Object properties) throws ChannelException { if (properties == null) properties = DEFAULT_PROTOCOL_STACK; ProtocolStackConfigurator c=null; try { c=ConfiguratorFactory.getStackConfigurator(properties); } catch(Exception x) { throw new ChannelException("unable to load protocol stack", x); } init(c); } /** * Creates a channel with the same configuration as the channel passed to this constructor. This is used by * testing code, and should not be used by any other code ! * @param ch * @throws ChannelException */ public JChannel(JChannel ch) throws ChannelException { init(ch); receive_blocks=ch.receive_blocks; receive_local_msgs=ch.receive_local_msgs; receive_blocks=ch.receive_blocks; } /** * Returns the protocol stack. * Currently used by Debugger. 
 * Specific to JChannel, therefore
 * not visible in Channel
 */
public ProtocolStack getProtocolStack() {
    return prot_stack;
}

protected Log getLog() {
    return log;
}

/**
 * Returns the protocol stack configuration in string format. An example of this property is<BR>
 * "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:FLUSH:GMS:VIEW_ENFORCER:STATE_TRANSFER:QUEUE"
 */
public String getProperties() {
    // refresh the cached string from the live stack if one exists
    String retval=prot_stack != null? prot_stack.printProtocolSpec(true) : null;
    if(retval != null)
        properties=retval;
    return properties;
}

public boolean statsEnabled() {
    return stats;
}

public void enableStats(boolean stats) {
    this.stats=stats;
}

@ManagedOperation
public void resetStats() {
    sent_msgs=received_msgs=sent_bytes=received_bytes=0;
}

@ManagedAttribute
public long getSentMessages() {return sent_msgs;}
@ManagedAttribute
public long getSentBytes() {return sent_bytes;}
@ManagedAttribute
public long getReceivedMessages() {return received_msgs;}
@ManagedAttribute
public long getReceivedBytes() {return received_bytes;}
@ManagedAttribute
public int getNumberOfTasksInTimer() {
    TimeScheduler timer=getTimer();
    return timer != null? timer.size() : -1;
}

@ManagedAttribute
public int getTimerThreads() {
    TimeScheduler timer=getTimer();
    return timer != null? timer.getCorePoolSize() : -1;
}

public String dumpTimerQueue() {
    TimeScheduler timer=getTimer();
    // NOTE(review): "<n/a" looks like a truncated "<n/a>" placeholder; left unchanged
    return timer != null? timer.dumpTaskQueue() : "<n/a";
}

/**
 * Returns a pretty-printed form of all the protocols. If include_properties
 * is set, the properties for each protocol will also be printed.
 */
@ManagedOperation
public String printProtocolSpec(boolean include_properties) {
    ProtocolStack ps=getProtocolStack();
    return ps != null? ps.printProtocolSpec(include_properties) : null;
}


/**
 * Connects the channel to a group.
 * If the channel is already connected, an error message will be printed to the error log.
 * If the channel is closed a ChannelClosed exception will be thrown.
 * This method starts the protocol stack by calling ProtocolStack.start,
 * then it sends an Event.CONNECT event down the stack and waits for the return value.
 * Once the call returns, the channel listeners are notified and the channel is considered connected.
 *
 * @param cluster_name A <code>String</code> denoting the group name. Cannot be null.
 * @exception ChannelException The protocol stack cannot be started
 * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
 *                                   A new channel has to be created first.
 */
@ManagedOperation(description="Connects the channel to a group")
public synchronized void connect(String cluster_name) throws ChannelException {
    // delegates with useFlushIfPresent=true
    connect(cluster_name,true);
}

/**
 * Connects the channel to a group.
 * If the channel is already connected, an error message will be printed to the error log.
 * If the channel is closed a ChannelClosed exception will be thrown.
 * This method starts the protocol stack by calling ProtocolStack.start,
 * then it sends an Event.CONNECT event down the stack and waits for the return value.
 * Once the call returns, the channel listeners are notified and the channel is considered connected.
 *
 * @param cluster_name A <code>String</code> denoting the group name. Cannot be null.
 * @exception ChannelException The protocol stack cannot be started
 * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
 *                                   A new channel has to be created first.
*/ @ManagedOperation(description="Connects the channel to a group") public synchronized void connect(String cluster_name, boolean useFlushIfPresent) throws ChannelException { if(connected) { if(log.isTraceEnabled()) log.trace("already connected to " + cluster_name); return; } setAddress(); startStack(cluster_name); if(cluster_name != null) { // only connect if we are not a unicast channel Event connect_event = null; if (useFlushIfPresent) { connect_event = new Event(Event.CONNECT_USE_FLUSH, cluster_name); } else { connect_event = new Event(Event.CONNECT, cluster_name); } Object res=downcall(connect_event); // waits forever until connected (or channel is closed) if(res != null && res instanceof Exception) { // the JOIN was rejected by the coordinator stopStack(true, false); init(); throw new ChannelException("connect() failed", (Throwable)res); } //if FLUSH is used do not return from connect() until UNBLOCK event is received if(flushSupported()) { try { flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT); } catch (TimeoutException timeout) { if(log.isWarnEnabled()) log.warn(local_addr + " waiting on UNBLOCK after connect() timed out"); } } } connected=true; notifyChannelConnected(this); } /** * Connects this channel to a group and gets a state from a specified state * provider. * <p> * * This method essentially invokes * <code>connect<code> and <code>getState<code> methods successively. * If FLUSH protocol is in channel's stack definition only one flush is executed for both connecting and * fetching state rather than two flushes if we invoke <code>connect<code> and <code>getState<code> in succesion. * * If the channel is already connected, an error message will be printed to the error log. * If the channel is closed a ChannelClosed exception will be thrown. * * * @param cluster_name the cluster name to connect to. Cannot be null. * @param target the state provider. If null state will be fetched from coordinator, unless this channel is coordinator. 
* @param state_id the substate id for partial state transfer. If null entire state will be transferred. * @param timeout the timeout for state transfer. * * @exception ChannelException The protocol stack cannot be started * @exception ChannelException Connecting to cluster was not successful * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer. * A new channel has to be created first. * @exception StateTransferException State transfer was not successful * */ public synchronized void connect(String cluster_name, Address target, String state_id, long timeout) throws ChannelException { connect(cluster_name, target, state_id, timeout,true); } /** * Connects this channel to a group and gets a state from a specified state * provider. * <p> * * This method essentially invokes * <code>connect<code> and <code>getState<code> methods successively. * If FLUSH protocol is in channel's stack definition only one flush is executed for both connecting and * fetching state rather than two flushes if we invoke <code>connect<code> and <code>getState<code> in succesion. * * If the channel is already connected, an error message will be printed to the error log. * If the channel is closed a ChannelClosed exception will be thrown. * * * @param cluster_name the cluster name to connect to. Cannot be null. * @param target the state provider. If null state will be fetched from coordinator, unless this channel is coordinator. * @param state_id the substate id for partial state transfer. If null entire state will be transferred. * @param timeout the timeout for state transfer. * * @exception ChannelException The protocol stack cannot be started * @exception ChannelException Connecting to cluster was not successful * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer. * A new channel has to be created first. 
* @exception StateTransferException State transfer was not successful * */ public synchronized void connect(String cluster_name, Address target, String state_id, long timeout, boolean useFlushIfPresent) throws ChannelException { if(connected) { if(log.isTraceEnabled()) log.trace("already connected to " + cluster_name); return; } setAddress(); startStack(cluster_name); boolean stateTransferOk=false; boolean joinSuccessful=false; boolean canFetchState=false; // only connect if we are not a unicast channel if(cluster_name == null) return; try { Event connect_event=null; if(useFlushIfPresent) connect_event=new Event(Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH, cluster_name); else connect_event=new Event(Event.CONNECT_WITH_STATE_TRANSFER, cluster_name); Object res=downcall(connect_event); // waits forever until connected (or channel is closed) joinSuccessful=!(res != null && res instanceof Exception); if(!joinSuccessful) { stopStack(true, false); init(); throw new ChannelException("connect() failed", (Throwable)res); } connected=true; notifyChannelConnected(this); canFetchState=getView() != null && getView().size() > 1; // if I am not the only member in cluster then if(canFetchState) { try { // fetch state from target stateTransferOk=getState(target, state_id, timeout, false); if(!stateTransferOk) { throw new StateTransferException(getAddress() + " could not fetch state from " + target); } } catch(Exception e) { throw new StateTransferException(getAddress() + " could not fetch state from " + target, e); } } } finally { if(flushSupported()) stopFlush(); } } /** * Disconnects the channel if it is connected. 
If the channel is closed, * this operation is ignored<BR> * Otherwise the following actions happen in the listed order<BR> * <ol> * <li> The JChannel sends a DISCONNECT event down the protocol stack<BR> * <li> Blocks until the event has returned<BR> * <li> Sends a STOP_QUEING event down the stack<BR> * <li> Stops the protocol stack by calling ProtocolStack.stop()<BR> * <li> Notifies the listener, if the listener is available<BR> * </ol> */ @ManagedOperation(description="Disconnects the channel if it is connected") public synchronized void disconnect() { if(closed) return; if(connected) { if(cluster_name != null) { // Send down a DISCONNECT event, which travels down to the GMS, where a response is returned Event disconnect_event=new Event(Event.DISCONNECT, local_addr); down(disconnect_event); // DISCONNECT is handled by each layer } connected=false; stopStack(true, false); notifyChannelDisconnected(this); init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining } } /** * Destroys the channel. * After this method has been called, the channel us unusable.<BR> * This operation will disconnect the channel and close the channel receive queue immediately<BR> */ @ManagedOperation(description="Disconnects and destroys the channel") public synchronized void close() { _close(true, true); // by default disconnect before closing channel and close mq } /** * Shuts down a channel without disconnecting. To be used by tests only, don't use for application purposes */ @ManagedOperation(description="Shuts down the channel without disconnecting") public synchronized void shutdown() { down(new Event(Event.SHUTDOWN)); _close(false, true); // by default disconnect before closing channel and close mq } /** * Opens the channel. Note that the channel is only open, but <em>not connected</em>. 
     * This does the following actions:
     * <ol>
     * <li> Resets the receiver queue by calling Queue.reset
     * <li> Sets up the protocol stack by calling ProtocolStack.setup
     * <li> Sets the closed flag to false
     * </ol>
     * @deprecated With the removal of shunning, this method should not be used anymore
     */
    @Deprecated
    public synchronized void open() throws ChannelException {
        if(!closed)
            throw new ChannelException("channel is already open");

        try {
            mq.reset();
            String props=getProperties();

            // new stack is created on open() - bela June 12 2003
            prot_stack=new ProtocolStack(this, props);
            prot_stack.setup();
            // only mark the channel open after the stack was set up successfully
            closed=false;
        }
        catch(Exception e) {
            throw new ChannelException("failed to open channel" , e);
        }
    }

    /**
     * returns true if the Open operation has been called successfully
     */
    @ManagedAttribute
    public boolean isOpen() {
        return !closed;
    }

    /**
     * returns true if the Connect operation has been called successfully
     */
    @ManagedAttribute
    public boolean isConnected() {
        return connected;
    }

    /** Returns the number of events (messages, views etc) currently waiting in the receive queue. */
    @ManagedAttribute
    public int getNumMessages() {
        return mq.size();
    }

    /** Returns a string dump of the receive queue's contents (for diagnostics). */
    @ManagedOperation
    public String dumpQueue() {
        return Util.dumpQueue(mq);
    }

    /**
     * Returns a map of statistics of the various protocols and of the channel itself.
     * @return Map<String,Map>. A map where the keys are the protocols ("channel" pseudo key is
     * used for the channel itself) and the values are property maps.
*/ @ManagedOperation public Map<String,Object> dumpStats() { Map<String,Object> retval=prot_stack.dumpStats(); if(retval != null) { Map<String,Long> tmp=dumpChannelStats(); if(tmp != null) retval.put("channel", tmp); } return retval; } @ManagedOperation public Map<String,Object> dumpStats(String protocol_name) { return prot_stack.dumpStats(protocol_name); } protected Map<String,Long> dumpChannelStats() { Map<String,Long> retval=new HashMap<String,Long>(); retval.put("sent_msgs", new Long(sent_msgs)); retval.put("sent_bytes", new Long(sent_bytes)); retval.put("received_msgs", new Long(received_msgs)); retval.put("received_bytes", new Long(received_bytes)); return retval; } /** * Sends a message through the protocol stack. * Implements the Transport interface. * * @param msg the message to be sent through the protocol stack, * the destination of the message is specified inside the message itself * @exception ChannelNotConnectedException * @exception ChannelClosedException */ @ManagedOperation public void send(Message msg) throws ChannelNotConnectedException, ChannelClosedException { checkClosedOrNotConnected(); if(msg == null) throw new NullPointerException("msg is null"); if(stats) { sent_msgs++; sent_bytes+=msg.getLength(); } down(new Event(Event.MSG, msg)); } /** * creates a new message with the destination address, and the source address * and the object as the message value * @param dst - the destination address of the message, null for all members * @param src - the source address of the message * @param obj - the value of the message * @exception ChannelNotConnectedException * @exception ChannelClosedException * @see JChannel#send */ @ManagedOperation public void send(Address dst, Address src, Serializable obj) throws ChannelNotConnectedException, ChannelClosedException { send(new Message(dst, src, obj)); } /** * Blocking receive method. * This method returns the object that was first received by this JChannel and that has not been * received before. 
After the object is received, it is removed from the receive queue.<BR> * If you only want to inspect the object received without removing it from the queue call * JChannel.peek<BR> * If no messages are in the receive queue, this method blocks until a message is added or the operation times out<BR> * By specifying a timeout of 0, the operation blocks forever, or until a message has been received. * @param timeout the number of milliseconds to wait if the receive queue is empty. 0 means wait forever * @exception TimeoutException if a timeout occured prior to a new message was received * @exception ChannelNotConnectedException * @exception ChannelClosedException * @see JChannel#peek * @deprecated Use a {@link Receiver} instead */ public Object receive(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException { checkClosedOrNotConnected(); try { Event evt=(timeout <= 0)? (Event)mq.remove() : (Event)mq.remove(timeout); Object retval=getEvent(evt); evt=null; return retval; } catch(QueueClosedException queue_closed) { throw new ChannelClosedException(); } catch(TimeoutException t) { throw t; } catch(Exception e) { if(log.isErrorEnabled()) log.error("exception: " + e); return null; } } /** * Just peeks at the next message, view or block. Does <em>not</em> install * new view if view is received<BR> * Does the same thing as JChannel.receive but doesn't remove the object from the * receiver queue * * @deprecated Use a {@link Receiver} instead */ public Object peek(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException { checkClosedOrNotConnected(); try { Event evt=(timeout <= 0)? 
(Event)mq.peek() : (Event)mq.peek(timeout); Object retval=getEvent(evt); evt=null; return retval; } catch(QueueClosedException queue_closed) { if(log.isErrorEnabled()) log.error("exception: " + queue_closed); return null; } catch(TimeoutException t) { return null; } catch(Exception e) { if(log.isErrorEnabled()) log.error("exception: " + e); return null; } } /** * Returns the current view. * <BR> * If the channel is not connected or if it is closed it will return null. * <BR> * @return returns the current group view, or null if the channel is closed or disconnected */ public View getView() { return closed || !connected ? null : my_view; } @ManagedAttribute(name="View") public String getViewAsString() { View v=getView(); return v != null ? v.toString() : "n/a"; } @ManagedAttribute public static String getVersion() { return Version.printDescription(); } public Address getLocalAddress() { return getAddress(); } /** * Returns the local address of the channel (null if the channel is closed) */ public Address getAddress() { return closed ? null : local_addr; } @ManagedAttribute(name="Address") public String getAddressAsString() { return local_addr != null? local_addr.toString() : "n/a"; } @ManagedAttribute(name="Address (UUID)") public String getAddressAsUUID() { return local_addr != null? local_addr.toStringLong() : null; } public String getName() { return name; } /** * Sets the logical name for the channel. The name will stay associated with this channel for the channel's * lifetime (until close() is called). This method should be called <em>before</em> calling connect().<br/> * @param name */ @ManagedAttribute(writable=true, description="The logical name of this channel. 
Stays with the channel until " + "the channel is closed") public void setName(String name) { if(name != null) { this.name=name; if(local_addr != null) { UUID.add(local_addr, this.name); } } } /** * returns the name of the channel * if the channel is not connected or if it is closed it will return null * @deprecated Use {@link #getClusterName()} instead */ public String getChannelName() { return closed ? null : !connected ? null : cluster_name; } @ManagedAttribute(description="Returns cluster name this channel is connected to") public String getClusterName() { return closed ? null : !connected ? null : cluster_name; } /** * Sets a channel option. The options can be one of the following: * <UL> * <LI> Channel.BLOCK * <LI> Channel.LOCAL * <LI> Channel.AUTO_RECONNECT * <LI> Channel.AUTO_GETSTATE * </UL> * <P> * There are certain dependencies between the options that you can set, * I will try to describe them here. * <P> * Option: Channel.BLOCK<BR> * Value: java.lang.Boolean<BR> * Result: set to true will set setOpt(VIEW, true) and the JChannel will receive BLOCKS and VIEW events<BR> *<BR> * Option: LOCAL<BR> * Value: java.lang.Boolean<BR> * Result: set to true the JChannel will receive messages that it self sent out.<BR> *<BR> * Option: AUTO_RECONNECT<BR> * Value: java.lang.Boolean<BR> * Result: set to true and the JChannel will try to reconnect when it is being closed<BR> *<BR> * Option: AUTO_GETSTATE<BR> * Value: java.lang.Boolean<BR> * Result: set to true, the AUTO_RECONNECT will be set to true and the JChannel will try to get the state after a close and reconnect happens<BR> * <BR> * * @param option the parameter option Channel.VIEW, Channel.SUSPECT, etc * @param value the value to set for this option * */ public void setOpt(int option, Object value) { if(closed) { if(log.isWarnEnabled()) log.warn("channel is closed; option not set !"); return; } switch(option) { case VIEW: if(log.isWarnEnabled()) log.warn("option VIEW has been deprecated (it is always true now); 
this option is ignored"); break; case SUSPECT: if(log.isWarnEnabled()) log.warn("option SUSPECT has been deprecated (it is always true now); this option is ignored"); break; case BLOCK: if(value instanceof Boolean) receive_blocks=((Boolean)value).booleanValue(); else if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " (" + value + "): value has to be Boolean"); break; case GET_STATE_EVENTS: if(log.isWarnEnabled()) log.warn("option GET_STATE_EVENTS has been deprecated (it is always true now); this option is ignored"); break; case LOCAL: if(value instanceof Boolean) receive_local_msgs=((Boolean)value).booleanValue(); else if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " (" + value + "): value has to be Boolean"); break; case AUTO_RECONNECT: if(log.isWarnEnabled()) log.warn("Option AUTO_RECONNECT has been deprecated and is ignored"); break; case AUTO_GETSTATE: if(log.isWarnEnabled()) log.warn("Option AUTO_GETSTATE has been deprecated and is ignored"); break; default: if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known"); break; } } /** * returns the value of an option. * @param option the option you want to see the value for * @return the object value, in most cases java.lang.Boolean * @see JChannel#setOpt */ public Object getOpt(int option) { switch(option) { case VIEW: return Boolean.TRUE; case BLOCK: return receive_blocks ? Boolean.TRUE : Boolean.FALSE; case SUSPECT: return Boolean.TRUE; case AUTO_RECONNECT: return false; case AUTO_GETSTATE: return false; case GET_STATE_EVENTS: return Boolean.TRUE; case LOCAL: return receive_local_msgs ? Boolean.TRUE : Boolean.FALSE; default: if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known"); return null; } } /** * Called to acknowledge a block() (callback in <code>MembershipListener</code> or * <code>BlockEvent</code> received from call to <code>receive()</code>). 
* After sending blockOk(), no messages should be sent until a new view has been received. * Calling this method on a closed channel has no effect. */ public void blockOk() { } /** * Retrieves a full state from the target member. * <p> * * State transfer is initiated by invoking getState on this channel, state * receiver, and sending a GET_STATE message to a target member - state * provider. State provider passes GET_STATE message to application that is * using the state provider channel which in turn provides an application * state to a state receiver. Upon successful installation of a state at * state receiver this method returns true. * * * @param target * State provider. If null, coordinator is used * @param timeout * the number of milliseconds to wait for the operation to * complete successfully. 0 waits until the state has been * received * * @see ExtendedMessageListener#getState(OutputStream) * @see ExtendedMessageListener#setState(InputStream) * @see MessageListener#getState() * @see MessageListener#setState(byte[]) * * * @return true if state transfer was successful, false otherwise * @throws ChannelNotConnectedException * if channel was not connected at the time state retrieval * was initiated * @throws ChannelClosedException * if channel was closed at the time state retrieval was * initiated * @throws IllegalStateException * if one of state transfer protocols is not present in this * channel * @throws IllegalStateException * if flush is used in this channel and cluster could not be * flushed */ public boolean getState(Address target, long timeout) throws ChannelNotConnectedException, ChannelClosedException { return getState(target,null,timeout); } /** * Retrieves a substate (or partial state) indicated by state_id from the target member. * <p> * * State transfer is initiated by invoking getState on this channel, state * receiver, and sending a GET_STATE message to a target member - state * provider. 
State provider passes GET_STATE message to application that is * using the state provider channel which in turn provides an application * state to a state receiver. Upon successful installation of a state at * state receiver this method returns true. * * * @param target * State provider. If null, coordinator is used * @param state_id * The ID of the substate. If null, the entire state will be * transferred * @param timeout * the number of milliseconds to wait for the operation to * complete successfully. 0 waits until the state has been * received * * @see ExtendedMessageListener#getState(OutputStream) * @see ExtendedMessageListener#setState(InputStream) * @see MessageListener#getState() * @see MessageListener#setState(byte[]) * * * @return true if state transfer was successful, false otherwise * @throws ChannelNotConnectedException * if channel was not connected at the time state retrieval * was initiated * @throws ChannelClosedException * if channel was closed at the time state retrieval was * initiated * @throws IllegalStateException * if one of state transfer protocols is not present in this * channel * @throws IllegalStateException * if flush is used in this channel and cluster could not be * flushed */ public boolean getState(Address target, String state_id, long timeout) throws ChannelNotConnectedException, ChannelClosedException { return getState(target, state_id, timeout, true); } /** * Retrieves a substate (or partial state) indicated by state_id from the target member. * <p> * * State transfer is initiated by invoking getState on this channel, state * receiver, and sending a GET_STATE message to a target member - state * provider. State provider passes GET_STATE message to application that is * using the state provider channel which in turn provides an application * state to a state receiver. Upon successful installation of a state at * state receiver this method returns true. * * * @param target * State provider. 
If null, coordinator is used * @param state_id * The ID of the substate. If null, the entire state will be * transferred * @param timeout * the number of milliseconds to wait for the operation to * complete successfully. 0 waits until the state has been * received * @param useFlushIfPresent * whether channel should be flushed prior to state retrieval * * @see ExtendedMessageListener#getState(OutputStream) * @see ExtendedMessageListener#setState(InputStream) * @see MessageListener#getState() * @see MessageListener#setState(byte[]) * * * @return true if state transfer was successful, false otherwise * @throws ChannelNotConnectedException * if channel was not connected at the time state retrieval * was initiated * @throws ChannelClosedException * if channel was closed at the time state retrieval was * initiated * @throws IllegalStateException * if one of state transfer protocols is not present in this * channel * @throws IllegalStateException * if flush is used in this channel and cluster could not be * flushed */ public boolean getState(Address target, String state_id, long timeout, boolean useFlushIfPresent) throws ChannelNotConnectedException, ChannelClosedException { Callable<Boolean> flusher = new Callable<Boolean>() { public Boolean call() throws Exception { return Util.startFlush(JChannel.this); } }; return getState(target, state_id, timeout, useFlushIfPresent?flusher:null); } /** * Retrieves a substate (or partial state) indicated by state_id from the target member. * <p> * * State transfer is initiated by invoking getState on this channel, state * receiver, and sending a GET_STATE message to a target member - state * provider. State provider passes GET_STATE message to application that is * using the state provider channel which in turn provides an application * state to a state receiver. Upon successful installation of a state at * state receiver this method returns true. * * * @param target * State provider. 
If null, coordinator is used * @param state_id * The ID of the substate. If null, the entire state will be * transferred * @param timeout * the number of milliseconds to wait for the operation to * complete successfully. 0 waits until the state has been * received * @param flushInvoker * algorithm invoking flush * * @see ExtendedMessageListener#getState(OutputStream) * @see ExtendedMessageListener#setState(InputStream) * @see MessageListener#getState() * @see MessageListener#setState(byte[]) * * * @return true if state transfer was successful, false otherwise * @throws ChannelNotConnectedException * if channel was not connected at the time state retrieval * was initiated * @throws ChannelClosedException * if channel was closed at the time state retrieval was * initiated * @throws IllegalStateException * if one of state transfer protocols is not present in this * channel * @throws IllegalStateException * if flush is used in this channel and cluster could not be * flushed */ protected boolean getState(Address target, String state_id, long timeout,Callable<Boolean> flushInvoker) throws ChannelNotConnectedException, ChannelClosedException { checkClosedOrNotConnected(); if(!state_transfer_supported) { throw new IllegalStateException("fetching state will fail as state transfer is not supported. 
" + "Add one of the STATE_TRANSFER protocols to your protocol configuration"); } if(target == null) target=determineCoordinator(); if(target != null && local_addr != null && target.equals(local_addr)) { if(log.isTraceEnabled()) log.trace("cannot get state from myself (" + target + "): probably the first member"); return false; } boolean initiateFlush = flushSupported() && flushInvoker!=null; if (initiateFlush) { boolean successfulFlush = false; try { successfulFlush = flushInvoker.call(); } catch (Exception e) { successfulFlush = false; // http://jira.jboss.com/jira/browse/JGRP-759 } finally { if (!successfulFlush) { throw new IllegalStateException("Node "+ local_addr+ " could not flush the cluster for state retrieval"); } } } state_promise.reset(); StateTransferInfo state_info=new StateTransferInfo(target, state_id, timeout); down(new Event(Event.GET_STATE, state_info)); Boolean b=state_promise.getResult(state_info.timeout); if(initiateFlush) stopFlush(); boolean state_transfer_successfull = b != null && b.booleanValue(); if(!state_transfer_successfull) down(new Event(Event.RESUME_STABLE)); return state_transfer_successfull; } /** * Retrieves the current group state. Sends GET_STATE event down to STATE_TRANSFER layer. * Blocks until STATE_TRANSFER sends up a GET_STATE_OK event or until <code>timeout</code> * milliseconds have elapsed. The argument of GET_STATE_OK should be a vector of objects. 
* @param targets - the target members to receive the state from ( an Address list ) * @param timeout - the number of milliseconds to wait for the operation to complete successfully * @return true of the state was received, false if the operation timed out * @deprecated Not really needed - we always want to get the state from a single member, * use {@link #getState(org.jgroups.Address, long)} instead */ public boolean getAllStates(Vector targets, long timeout) throws ChannelNotConnectedException, ChannelClosedException { throw new UnsupportedOperationException("use getState() instead"); } /** * Called by the application is response to receiving a <code>getState()</code> object when * calling <code>receive()</code>. * When the application receives a getState() message on the receive() method, * it should call returnState() to reply with the state of the application * @param state The state of the application as a byte buffer * (to send over the network). */ public void returnState(byte[] state) { try { StateTransferInfo state_info=new StateTransferInfo(null, null, 0L, state); applstate_exchanger.exchange(state_info); } catch(InterruptedException e) { Thread.currentThread().interrupt(); } } /** * Returns a substate as indicated by state_id * @param state * @param state_id */ public void returnState(byte[] state, String state_id) { try { StateTransferInfo state_info=new StateTransferInfo(null, state_id, 0L, state); applstate_exchanger.exchange(state_info); } catch(InterruptedException e) { Thread.currentThread().interrupt(); } } /** * Callback method <BR> * Called by the ProtocolStack when a message is received. * It will be added to the message queue from which subsequent * <code>Receive</code>s will dequeue it. 
* @param evt the event carrying the message from the protocol stack */ public Object up(Event evt) { int type=evt.getType(); Message msg; switch(type) { case Event.MSG: msg=(Message)evt.getArg(); if(stats) { received_msgs++; received_bytes+=msg.getLength(); } if(!receive_local_msgs) { // discard local messages (sent by myself to me) if(local_addr != null && msg.getSrc() != null) if(local_addr.equals(msg.getSrc())) return null; } break; case Event.VIEW_CHANGE: View tmp=(View)evt.getArg(); if(tmp instanceof MergeView) my_view=new View(tmp.getVid(), tmp.getMembers()); else my_view=tmp; /* * Bela&Vladimir Oct 27th,2006 (JGroups 2.4)- we need to switch to * connected=true because client can invoke channel.getView() in * viewAccepted() callback invoked on this thread * (see Event.VIEW_CHANGE handling below) */ // not good: we are only connected when we returned from connect() - bela June 22 2007 // if(connected == false) { // connected=true; // } break; case Event.CONFIG: Map<String,Object> cfg=(Map<String,Object>)evt.getArg(); if(cfg != null) { if(cfg.containsKey("state_transfer")) { state_transfer_supported=((Boolean)cfg.get("state_transfer")).booleanValue(); } if(cfg.containsKey("flush_supported")) { flush_supported=((Boolean)cfg.get("flush_supported")).booleanValue(); } cfg.putAll(cfg); } break; case Event.GET_STATE_OK: StateTransferInfo state_info=(StateTransferInfo)evt.getArg(); byte[] state=state_info.state; try { if(up_handler != null) { return up_handler.up(evt); } if(state != null) { String state_id=state_info.state_id; if(receiver != null) { try { if(receiver instanceof ExtendedReceiver && state_id != null) ((ExtendedReceiver)receiver).setState(state_id, state); else receiver.setState(state); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling setState() in receiver", t); } } else { try { mq.add(new Event(Event.STATE_RECEIVED, state_info)); } catch(Exception e) { } } } } finally { state_promise.setResult(state != null? 
Boolean.TRUE : Boolean.FALSE); } break; case Event.STATE_TRANSFER_INPUTSTREAM_CLOSED: state_promise.setResult(Boolean.TRUE); break; case Event.STATE_TRANSFER_INPUTSTREAM: StateTransferInfo sti=(StateTransferInfo)evt.getArg(); InputStream is=sti.inputStream; //Oct 13,2006 moved to down() when Event.STATE_TRANSFER_INPUTSTREAM_CLOSED is received //state_promise.setResult(is != null? Boolean.TRUE : Boolean.FALSE); if(up_handler != null) { return up_handler.up(evt); } if(is != null) { if(receiver instanceof ExtendedReceiver) { try { if(sti.state_id == null) ((ExtendedReceiver)receiver).setState(is); else ((ExtendedReceiver)receiver).setState(sti.state_id, is); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling setState() in receiver", t); } } else if(receiver instanceof Receiver){ if(log.isWarnEnabled()){ log.warn("Channel has STREAMING_STATE_TRANSFER, however," + " application does not implement ExtendedMessageListener. State is not transfered"); Util.close(is); } } else { try { mq.add(new Event(Event.STATE_TRANSFER_INPUTSTREAM, sti)); } catch(Exception e) { } } } break; case Event.GET_LOCAL_ADDRESS: return local_addr; default: break; } // If UpHandler is installed, pass all events to it and return (UpHandler is e.g. 
a building block) if(up_handler != null) { Object ret=up_handler.up(evt); if(type == Event.UNBLOCK){ flush_unblock_promise.setResult(Boolean.TRUE); } return ret; } switch(type) { case Event.MSG: if(receiver != null) { try { receiver.receive((Message)evt.getArg()); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling receive() in receiver", t); } return null; } break; case Event.VIEW_CHANGE: if(receiver != null) { try { receiver.viewAccepted((View)evt.getArg()); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling viewAccepted() in receiver", t); } return null; } break; case Event.SUSPECT: if(receiver != null) { try { receiver.suspect((Address)evt.getArg()); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling suspect() in receiver", t); } return null; } break; case Event.GET_APPLSTATE: if(receiver != null) { StateTransferInfo state_info=(StateTransferInfo)evt.getArg(); byte[] tmp_state=null; String state_id=state_info.state_id; try { if(receiver instanceof ExtendedReceiver && state_id!=null) { tmp_state=((ExtendedReceiver)receiver).getState(state_id); } else { tmp_state=receiver.getState(); } } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling getState() in receiver", t); } return new StateTransferInfo(null, state_id, 0L, tmp_state); } break; case Event.STATE_TRANSFER_OUTPUTSTREAM: StateTransferInfo sti=(StateTransferInfo)evt.getArg(); OutputStream os=sti.outputStream; if(receiver instanceof ExtendedReceiver) { if(os != null) { try { if(sti.state_id == null) ((ExtendedReceiver)receiver).getState(os); else ((ExtendedReceiver)receiver).getState(sti.state_id, os); } catch(Throwable t) { if(log.isWarnEnabled()) log.warn("failed calling getState() in receiver", t); } } } else if(receiver instanceof Receiver){ if(log.isWarnEnabled()){ log.warn("Channel has STREAMING_STATE_TRANSFER, however," + " application does not implement ExtendedMessageListener. 
State is not transfered"); Util.close(os); } } break; case Event.BLOCK: if(!receive_blocks) { // discard if client has not set 'receiving blocks' to 'on' return Boolean.TRUE; } if(receiver != null) { try { receiver.block(); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed calling block() in receiver", t); } return Boolean.TRUE; } break; case Event.UNBLOCK: //invoke receiver if block receiving is on if(receive_blocks && receiver instanceof ExtendedReceiver) { try { ((ExtendedReceiver)receiver).unblock(); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed calling unblock() in receiver", t); } } //flip promise flush_unblock_promise.setResult(Boolean.TRUE); return null; default: break; } if(type == Event.MSG || type == Event.VIEW_CHANGE || type == Event.SUSPECT || type == Event.GET_APPLSTATE || type== Event.STATE_TRANSFER_OUTPUTSTREAM || type == Event.BLOCK || type == Event.UNBLOCK) { try { mq.add(evt); } catch(QueueClosedException queue_closed) { ; // ignore } catch(Exception e) { if(log.isWarnEnabled()) log.warn("exception adding event " + evt + " to message queue", e); } } if(type == Event.GET_APPLSTATE) { try { return applstate_exchanger.exchange(null); } catch(InterruptedException e) { Thread.currentThread().interrupt(); return null; } } return null; } /** * Sends a message through the protocol stack if the stack is available * @param evt the message to send down, encapsulated in an event */ public void down(Event evt) { if(evt == null) return; switch(evt.getType()) { case Event.CONFIG: try { Map<String,Object> m=(Map<String,Object>)evt.getArg(); if(m != null) { additional_data.putAll(m); if(m.containsKey("additional_data")) { byte[] tmp=(byte[])m.get("additional_data"); if(local_addr != null) local_addr.setAdditionalData(tmp); } } } catch(Throwable t) { if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t); } break; } prot_stack.down(evt); } public Object downcall(Event evt) { if(evt == null) return 
null; switch(evt.getType()) { case Event.CONFIG: try { Map<String,Object> m=(Map<String,Object>)evt.getArg(); if(m != null) { additional_data.putAll(m); if(m.containsKey("additional_data")) { byte[] tmp=(byte[])m.get("additional_data"); if(local_addr != null) local_addr.setAdditionalData(tmp); } } } catch(Throwable t) { if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t); } break; } return prot_stack.down(evt); } @ManagedOperation public String toString(boolean details) { StringBuilder sb=new StringBuilder(); sb.append("local_addr=").append(local_addr).append('\n'); sb.append("cluster_name=").append(cluster_name).append('\n'); sb.append("my_view=").append(my_view).append('\n'); sb.append("connected=").append(connected).append('\n'); sb.append("closed=").append(closed).append('\n'); sb.append("incoming queue size=").append(mq.size()).append('\n'); if(details) { sb.append("receive_blocks=").append(receive_blocks).append('\n'); sb.append("receive_local_msgs=").append(receive_local_msgs).append('\n'); sb.append("state_transfer_supported=").append(state_transfer_supported).append('\n'); sb.append("props=").append(getProperties()).append('\n'); } return sb.toString(); } /* ----------------------------------- Private Methods ------------------------------------- */ protected final void init(ProtocolStackConfigurator configurator) throws ChannelException { if(log.isInfoEnabled()) log.info("JGroups version: " + Version.description); // ConfiguratorFactory.substituteVariables(configurator); // replace vars with system props String tmp=configurator.getProtocolStackString(); tmp=Util.substituteVariable(tmp); // replace vars with system props synchronized(getClass()) { prot_stack=new ProtocolStack(this, tmp); try { prot_stack.setup(); // Setup protocol stack (creates protocol, calls init() on them) properties=tmp; } catch(Throwable e) { throw new ChannelException("unable to setup the protocol stack: " + e.getMessage(), e); } } } protected final 
void init(JChannel ch) throws ChannelException { if(ch == null) throw new IllegalArgumentException("channel is null"); if(log.isInfoEnabled()) log.info("JGroups version: " + Version.description); synchronized(getClass()) { prot_stack=new ProtocolStack(this, null); try { prot_stack.setup(ch.getProtocolStack()); // Setup protocol stack (creates protocol, calls init() on them) getProperties(); } catch(Throwable e) { throw new ChannelException("unable to setup the protocol stack: " + e.getMessage(), e); } } } /** * Initializes all variables. Used after <tt>close()</tt> or <tt>disconnect()</tt>, * to be ready for new <tt>connect()</tt> */ private void init() { if(local_addr != null) down(new Event(Event.REMOVE_ADDRESS, local_addr)); local_addr=null; cluster_name=null; my_view=null; // changed by Bela Sept 25 2003 //if(mq != null && mq.closed()) // mq.reset(); connected=false; } private void startStack(String cluster_name) throws ChannelException { /*make sure the channel is not closed*/ checkClosed(); /*make sure we have a valid channel name*/ if(cluster_name == null) { if(log.isDebugEnabled()) log.debug("cluster_name is null, assuming unicast channel"); } else this.cluster_name=cluster_name; try { prot_stack.startStack(cluster_name, local_addr); // calls start() in all protocols, from top to bottom } catch(Throwable e) { throw new ChannelException("failed to start protocol stack", e); } /*create a temporary view, assume this channel is the only member and is the coordinator*/ Vector<Address> t=new Vector<Address>(1); t.addElement(local_addr); my_view=new View(local_addr, 0, t); // create a dummy view TP transport=prot_stack.getTransport(); transport.registerProbeHandler(probe_handler); } /** * Generates new UUID and sets local address. 
Sends down a REMOVE_ADDRESS (if existing address was present) and * a SET_LOCAL_ADDRESS */ private void setAddress() { UUID old_addr=local_addr; local_addr=UUID.randomUUID(); byte[] buf=(byte[])additional_data.get("additional_data"); if(buf != null) local_addr.setAdditionalData(buf); if(old_addr != null) down(new Event(Event.REMOVE_ADDRESS, old_addr)); if(name == null || name.length() == 0) // generate a logical name if not set name=Util.generateLocalName(); if(name != null && name.length() > 0) UUID.add(local_addr, name); Event evt=new Event(Event.SET_LOCAL_ADDRESS, local_addr); down(evt); if(up_handler != null) up_handler.up(evt); } /** * health check<BR> * throws a ChannelClosed exception if the channel is closed */ protected void checkClosed() throws ChannelClosedException { if(closed) throw new ChannelClosedException(); } protected void checkClosedOrNotConnected() throws ChannelNotConnectedException, ChannelClosedException { if(closed) throw new ChannelClosedException(); if(!connected) throw new ChannelNotConnectedException(); } /** * returns the value of the event<BR> * These objects will be returned<BR> * <PRE> * <B>Event Type - Return Type</B> * Event.MSG - returns a Message object * Event.VIEW_CHANGE - returns a View object * Event.SUSPECT - returns a SuspectEvent object * Event.BLOCK - returns a new BlockEvent object * Event.GET_APPLSTATE - returns a GetStateEvent object * Event.STATE_RECEIVED- returns a SetStateEvent object * Event.Exit - returns an ExitEvent object * All other - return the actual Event object * </PRE> * @param evt - the event of which you want to extract the value * @return the event value if it matches the select list, * returns null if the event is null * returns the event itself if a match (See above) can not be made of the event type */ static Object getEvent(Event evt) { if(evt == null) return null; // correct ? 
switch(evt.getType()) { case Event.MSG: return evt.getArg(); case Event.VIEW_CHANGE: return evt.getArg(); case Event.SUSPECT: return new SuspectEvent(evt.getArg()); case Event.BLOCK: return new BlockEvent(); case Event.UNBLOCK: return new UnblockEvent(); case Event.GET_APPLSTATE: StateTransferInfo info=(StateTransferInfo)evt.getArg(); return new GetStateEvent(info.target, info.state_id); case Event.STATE_RECEIVED: info=(StateTransferInfo)evt.getArg(); return new SetStateEvent(info.state, info.state_id); case Event.STATE_TRANSFER_OUTPUTSTREAM: info = (StateTransferInfo)evt.getArg(); return new StreamingGetStateEvent(info.outputStream,info.state_id); case Event.STATE_TRANSFER_INPUTSTREAM: info = (StateTransferInfo)evt.getArg(); return new StreamingSetStateEvent(info.inputStream,info.state_id); default: return evt; } } /** * Disconnects and closes the channel. * This method does the following things * <ol> * <li>Calls <code>this.disconnect</code> if the disconnect parameter is true * <li>Calls <code>Queue.close</code> on mq if the close_mq parameter is true * <li>Calls <code>ProtocolStack.stop</code> on the protocol stack * <li>Calls <code>ProtocolStack.destroy</code> on the protocol stack * <li>Sets the channel closed and channel connected flags to true and false * <li>Notifies any channel listener of the channel close operation * </ol> */ protected void _close(boolean disconnect, boolean close_mq) { UUID old_addr=local_addr; if(closed) return; if(disconnect) disconnect(); // leave group if connected if(close_mq) closeMessageQueue(false); stopStack(true, true); closed=true; connected=false; notifyChannelClosed(this); init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining if(old_addr != null) UUID.remove(old_addr); } protected void stopStack(boolean stop, boolean destroy) { if(prot_stack != null) { try { if(stop) prot_stack.stopStack(cluster_name); if(destroy) prot_stack.destroy(); } catch(Exception e) { 
if(log.isErrorEnabled()) log.error("failed destroying the protocol stack", e); } TP transport=prot_stack.getTransport(); if(transport != null) transport.unregisterProbeHandler(probe_handler); } } public final void closeMessageQueue(boolean flush_entries) { mq.close(flush_entries); } public boolean flushSupported() { return flush_supported; } /** * Will perform a flush of the system, ie. all pending messages are flushed out of the * system and all members ack their reception. After this call returns, no member will * be sending any messages until {@link #stopFlush()} is called. * <p> * In case of flush collisions, random sleep time backoff algorithm is employed and * flush is reattempted for numberOfAttempts. Therefore this method is guaranteed * to return after timeout x numberOfAttempts miliseconds. * * @param automatic_resume Call {@link #stopFlush()} after the flush * @return true if FLUSH completed within the timeout */ public boolean startFlush(boolean automatic_resume) { if(!flushSupported()) { throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration"); } boolean successfulFlush = (Boolean) downcall(new Event(Event.SUSPEND)); if(automatic_resume) stopFlush(); return successfulFlush; } /** * Performs a partial flush in a cluster for flush participants. * <p> * All pending messages are flushed out only for flush participants. * Remaining members in a cluster are not included in flush. * Flush participants should be a proper subset of a current view. * * <p> * In case of flush collisions, random sleep time backoff algorithm is employed and * flush is reattempted for numberOfAttempts. Therefore this method is guaranteed * to return after timeout x numberOfAttempts miliseconds. 
* * @param automatic_resume Call {@link #stopFlush()} after the flush * @return true if FLUSH completed within the timeout */ public boolean startFlush(List<Address> flushParticipants,boolean automatic_resume) { boolean successfulFlush = false; if(!flushSupported()){ throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration"); } View v = getView(); if(v != null && v.getMembers().containsAll(flushParticipants)){ successfulFlush = (Boolean) downcall(new Event(Event.SUSPEND, flushParticipants)); }else{ throw new IllegalArgumentException("Current view " + v + " does not contain all flush participants " + flushParticipants); } if(automatic_resume) stopFlush(flushParticipants); return successfulFlush; } /** * Will perform a flush of the system, ie. all pending messages are flushed out of the * system and all members ack their reception. After this call returns, no member will * be sending any messages until {@link #stopFlush()} is called. * <p> * In case of flush collisions, random sleep time backoff algorithm is employed and * flush is reattempted for numberOfAttempts. Therefore this method is guaranteed * to return after timeout x numberOfAttempts miliseconds. 
* @param timeout * @param automatic_resume Call {@link #stopFlush()} after the flush * @return true if FLUSH completed within the timeout */ public boolean startFlush(long timeout, boolean automatic_resume) { return startFlush(automatic_resume); } public void stopFlush() { if(!flushSupported()) { throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration"); } flush_unblock_promise.reset(); down(new Event(Event.RESUME)); //do not return until UNBLOCK event is received try { flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT); } catch(TimeoutException te) { log.warn("Timeout waiting for UNBLOCK event at " + getAddress()); } } public void stopFlush(List<Address> flushParticipants) { if(!flushSupported()) { throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration"); } flush_unblock_promise.reset(); down(new Event(Event.RESUME, flushParticipants)); // do not return until UNBLOCK event is received try { flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT); } catch(TimeoutException te) { log.warn("Timeout waiting for UNBLOCK event at " + getAddress()); } } @Override public Map<String, Object> getInfo(){ return new HashMap<String, Object>(config); } public void setInfo(String key, Object value) { if(key != null) config.put(key, value); } Address determineCoordinator() { Vector<Address> mbrs=my_view != null? my_view.getMembers() : null; if(mbrs == null) return null; if(!mbrs.isEmpty()) return mbrs.firstElement(); return null; } private TimeScheduler getTimer() { if(prot_stack != null) { TP transport=prot_stack.getTransport(); if(transport != null) { return transport.getTimer(); } } return null; } /* ------------------------------- End of Private Methods ---------------------------------- */ class MyProbeHandler implements TP.ProbeHandler { public Map<String, String> handleProbe(String... 
keys) { HashMap<String, String> map=new HashMap<String, String>(2); for(String key: keys) { if(key.startsWith("jmx")) { Map<String, Object> tmp_stats; int index=key.indexOf("="); if(index > -1) { String value=key.substring(index +1); tmp_stats=dumpStats(value); } else tmp_stats=dumpStats(); map.put("jmx", tmp_stats != null? Util.mapToString(tmp_stats) : "null"); continue; } if(key.equals("info")) { Map<String, Object> tmp_info=getInfo(); map.put("info", tmp_info != null? Util.mapToString(tmp_info) : "null"); } } map.put("version", Version.description + ", cvs=\"" + Version.cvs + "\""); if(my_view != null && !map.containsKey("view")) map.put("view", my_view.toString()); map.put("local_addr", getAddressAsString() + " [" + getAddressAsUUID() + "]"); map.put("cluster", getClusterName()); return map; } public String[] supportedKeys() { return new String[]{"jmx", "info"}; } } }
Added state and address information to exceptions.
src/org/jgroups/JChannel.java
Added state and address information to exceptions.
Java
apache-2.0
a4466a1a23287af151185a1775e204976eff7588
0
googleinterns/step128-2020,googleinterns/step128-2020,googleinterns/step128-2020
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.servlets; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.EntityNotFoundException; import com.google.appengine.api.datastore.Key; import com.google.appengine.api.datastore.KeyFactory; import com.google.appengine.api.datastore.PreparedQuery; import com.google.appengine.api.datastore.Query; import com.google.appengine.api.datastore.Query.SortDirection; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.google.gson.Gson; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.logging.Logger; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @WebServlet("/user") public class UserServlet extends HttpServlet { private static final Logger LOGGER = Logger.getLogger(UserServlet.class.getName()); @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { // returns a list of events DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); UserService userService = 
UserServiceFactory.getUserService(); Gson gson = new Gson(); List<Entity> events = new ArrayList<>(); response.setContentType("application/json"); if (userService.isUserLoggedIn()) { String userEmail = userService.getCurrentUser().getEmail(); Key userKey = KeyFactory.createKey("User", userEmail); try { Entity userEntity = datastore.get(userKey); switch (request.getParameter("get")) { case "saved": events = getHandleSaved(userEntity); break; case "created": events = getHandleCreated(userEmail); break; default: throw new IOException("missing or invalid parameters"); } LOGGER.info("queried for events @ account " + userEmail); } catch (EntityNotFoundException exception) { // datastore entry has not been created yet for this user, create it now Entity entity = new Entity(userKey); entity.setProperty("id", userEmail); datastore.put(entity); } } else { // return a list with all created events PreparedQuery results = datastore.prepare(new Query("Event").addSort("eventName", SortDirection.ASCENDING)); for (Entity e : results.asIterable()) { events.add(e); } } // TODO: apply any sort params Collections.sort(events, Utils.ORDER_BY_NAME); response.getWriter().println(gson.toJson(events)); } // returns a list of all events saved by a user entity private List<Entity> getHandleSaved(Entity userEntity) { DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); List<Entity> results = new ArrayList<>(); // get the list of saved events (stored by id) @SuppressWarnings("unchecked") List<Long> savedEvents = (ArrayList<Long>) userEntity.getProperty("saved"); if (savedEvents != null) { for (long l : savedEvents) { try { results.add(datastore.get(KeyFactory.createKey("Event", l))); } catch (EntityNotFoundException exception) { LOGGER.info("entity not found for event id " + l); } } } return results; } // returns a list of all events created by a user (identified by email id) private List<Entity> getHandleCreated(String userEmail) { DatastoreService datastore = 
DatastoreServiceFactory.getDatastoreService(); List<Entity> results = new ArrayList<>(); Query query = new Query("Event") .setFilter(new Query.FilterPredicate("creator", Query.FilterOperator.EQUAL, userEmail)); PreparedQuery queried = datastore.prepare(query); for (Entity e : queried.asIterable()) { results.add(e); } return results; } @Override public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException { // adds or removes events from user's saved events list DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); UserService userService = UserServiceFactory.getUserService(); if (!userService.isUserLoggedIn()) { throw new IOException("must be logged in"); } if (request.getParameter("event") == null) { throw new IOException("no event key specified"); } Long eventId = 0L; try { eventId = Long.parseLong(request.getParameter("event")); } catch (NumberFormatException e) { throw new IOException("invalid format for event key"); } // Handle the logic String userEmail = userService.getCurrentUser().getEmail(); Key userKey = KeyFactory.createKey("User", userEmail); try { Entity userEntity = datastore.get(userKey); List<Long> saved = (ArrayList<Long>) userEntity.getProperty("saved"); if (saved == null) { saved = new ArrayList<>(); } switch (request.getParameter("action")) { case "save": postHandleSave(saved, eventId); break; case "unsave": postHandleUnsave(saved, eventId); break; default: throw new IOException("missing or invalid parameters"); } userEntity.setProperty("saved", saved); datastore.put(userEntity); } catch (EntityNotFoundException exception) { // datastore entry has not been created yet for this user, create it now Entity entity = new Entity(userKey); entity.setProperty("id", userEmail); datastore.put(entity); } response.sendRedirect("/my-events.html"); } // adds event id to list if it is not already present private void postHandleSave(List<Long> saved, long eventId) { DatastoreService datastore = 
DatastoreServiceFactory.getDatastoreService(); Key eventKey = KeyFactory.createKey("Event", eventId); try { Entity eventEntity = datastore.get(eventKey); if (saved.contains(eventId)) { LOGGER.info("event " + eventId + " has already been saved"); return; } Object attendees = eventEntity.getProperty("attendeeCount"); int attendeeCount = 1; if (attendees != null) { try { attendeeCount += Integer.parseInt(attendees.toString()); } catch (NumberFormatException num) { LOGGER.info("error parsing attendee count for event id " + eventId); attendeeCount = 0; } } eventEntity.setProperty("attendeeCount", attendeeCount); datastore.put(eventEntity); saved.add(eventId); } catch (EntityNotFoundException e) { LOGGER.info("event " + eventId + " does not exist"); } } // removes event id from list if it is present private void postHandleUnsave(List<Long> saved, long eventId) { for (int i = 0; i < saved.size(); i++) { if (saved.get(i) == eventId) { saved.remove(i); DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); Key eventKey = KeyFactory.createKey("Event", eventId); try { Entity eventEntity = datastore.get(eventKey); Object attendees = eventEntity.getProperty("attendeeCount"); int attendeeCount = -1; if (attendees != null) { try { attendeeCount += Integer.parseInt(attendees.toString()); } catch (NumberFormatException num) { LOGGER.info("error parsing attendee count for event id " + eventId); attendeeCount = 0; } } if (attendeeCount < 0) { attendeeCount = 0; } eventEntity.setProperty("attendeeCount", attendeeCount); datastore.put(eventEntity); } catch (EntityNotFoundException e) { LOGGER.info("event " + eventId + " does not exist"); } return; } } LOGGER.info("event " + eventId + " has not been saved yet"); } }
src/main/java/com/google/sps/servlets/UserServlet.java
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.servlets; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.EntityNotFoundException; import com.google.appengine.api.datastore.Key; import com.google.appengine.api.datastore.KeyFactory; import com.google.appengine.api.datastore.PreparedQuery; import com.google.appengine.api.datastore.Query; import com.google.appengine.api.datastore.Query.SortDirection; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.google.gson.Gson; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.logging.Logger; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @WebServlet("/user") public class UserServlet extends HttpServlet { private static final Logger LOGGER = Logger.getLogger(UserServlet.class.getName()); @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { // returns a list of events DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); UserService userService = 
UserServiceFactory.getUserService(); Gson gson = new Gson(); List<Entity> events = new ArrayList<>(); response.setContentType("application/json"); if (userService.isUserLoggedIn()) { String userEmail = userService.getCurrentUser().getEmail(); Key userKey = KeyFactory.createKey("User", userEmail); try { Entity userEntity = datastore.get(userKey); switch (request.getParameter("get")) { case "saved": events = getHandleSaved(userEntity); break; case "created": events = getHandleCreated(userEmail); break; default: throw new IOException("missing or invalid parameters"); } LOGGER.info("queried for events @ account " + userEmail); } catch (EntityNotFoundException exception) { // datastore entry has not been created yet for this user, create it now Entity entity = new Entity(userKey); entity.setProperty("id", userEmail); datastore.put(entity); } } else { // return a list with all created events PreparedQuery results = datastore.prepare(new Query("Event").addSort("eventName", SortDirection.ASCENDING)); for (Entity e : results.asIterable()) { events.add(e); } } // TODO: apply any sort params Collections.sort(events, Utils.ORDER_BY_NAME); response.getWriter().println(gson.toJson(events)); } // returns a list of all events saved by a user entity private List<Entity> getHandleSaved(Entity userEntity) { DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); List<Entity> results = new ArrayList<>(); // get the list of saved events (stored by id) @SuppressWarnings("unchecked") List<Long> savedEvents = (ArrayList<Long>) userEntity.getProperty("saved"); if (savedEvents != null) { for (long l : savedEvents) { try { results.add(datastore.get(KeyFactory.createKey("Event", l))); } catch (EntityNotFoundException exception) { LOGGER.info("entity not found for event id " + l); } } } return results; } // returns a list of all events created by a user (identified by email id) private List<Entity> getHandleCreated(String userEmail) { DatastoreService datastore = 
DatastoreServiceFactory.getDatastoreService(); List<Entity> results = new ArrayList<>(); Query query = new Query("Event") .setFilter(new Query.FilterPredicate("creator", Query.FilterOperator.EQUAL, userEmail)); PreparedQuery queried = datastore.prepare(query); for (Entity e : queried.asIterable()) { results.add(e); } return results; } @Override public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException { // adds or removes events from user's saved events list DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); UserService userService = UserServiceFactory.getUserService(); if (!userService.isUserLoggedIn()) { throw new IOException("must be logged in"); } if (request.getParameter("event") == null) { throw new IOException("no event key specified"); } Long eventId = 0L; try { eventId = Long.parseLong(request.getParameter("event")); } catch (NumberFormatException e) { throw new IOException("invalid format for event key"); } // Handle the logic String userEmail = userService.getCurrentUser().getEmail(); Key userKey = KeyFactory.createKey("User", userEmail); try { Entity userEntity = datastore.get(userKey); List<Long> saved = (ArrayList<Long>) userEntity.getProperty("saved"); if (saved == null) { saved = new ArrayList<>(); } switch (request.getParameter("action")) { case "save": postHandleSave(saved, eventId); break; case "unsave": postHandleUnsave(saved, eventId); break; default: throw new IOException("missing or invalid parameters"); } userEntity.setProperty("saved", saved); datastore.put(userEntity); } catch (EntityNotFoundException exception) { // datastore entry has not been created yet for this user, create it now Entity entity = new Entity(userKey); entity.setProperty("id", userEmail); datastore.put(entity); } } // adds event id to list if it is not already present private void postHandleSave(List<Long> saved, long eventId) { DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); Key 
eventKey = KeyFactory.createKey("Event", eventId); try { Entity eventEntity = datastore.get(eventKey); if (saved.contains(eventId)) { LOGGER.info("event " + eventId + " has already been saved"); return; } Object attendees = eventEntity.getProperty("attendeeCount"); int attendeeCount = 1; if (attendees != null) { try { attendeeCount += Integer.parseInt(attendees.toString()); } catch (NumberFormatException num) { LOGGER.info("error parsing attendee count for event id " + eventId); attendeeCount = 0; } } eventEntity.setProperty("attendeeCount", attendeeCount); datastore.put(eventEntity); saved.add(eventId); } catch (EntityNotFoundException e) { LOGGER.info("event " + eventId + " does not exist"); } } // removes event id from list if it is present private void postHandleUnsave(List<Long> saved, long eventId) { for (int i = 0; i < saved.size(); i++) { if (saved.get(i) == eventId) { saved.remove(i); DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); Key eventKey = KeyFactory.createKey("Event", eventId); try { Entity eventEntity = datastore.get(eventKey); Object attendees = eventEntity.getProperty("attendeeCount"); int attendeeCount = -1; if (attendees != null) { try { attendeeCount += Integer.parseInt(attendees.toString()); } catch (NumberFormatException num) { LOGGER.info("error parsing attendee count for event id " + eventId); attendeeCount = 0; } } if (attendeeCount < 0) { attendeeCount = 0; } eventEntity.setProperty("attendeeCount", attendeeCount); datastore.put(eventEntity); } catch (EntityNotFoundException e) { LOGGER.info("event " + eventId + " does not exist"); } return; } } LOGGER.info("event " + eventId + " has not been saved yet"); } }
Add redirect to my-events
src/main/java/com/google/sps/servlets/UserServlet.java
Add redirect to my-events
Java
apache-2.0
733af431ccd067903786e824673abfe45b3137ff
0
h136799711/banma_android
package com.itboye.banma.welcome; import java.util.ArrayList; import java.util.List; import org.json.JSONException; import org.json.JSONObject; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.ImageRequest; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.itboye.banma.R; import com.itboye.banma.activities.HomePageActivity; import com.itboye.banma.api.StrUIDataListener; import com.itboye.banma.api.StrVolleyInterface; import com.itboye.banma.app.AppContext; import com.itboye.banma.app.Constant; import com.itboye.banma.entity.ProductItem; import com.itboye.banma.service.TokenIntentService; import com.itboye.banma.utils.BitmapCache; import com.itboye.banma.utils.BitmapCacheHomageImage; import com.itboye.banma.utils.SharedConfig; import com.umeng.analytics.AnalyticsConfig; import com.umeng.analytics.MobclickAgent; import com.umeng.message.PushAgent; import com.umeng.message.UmengRegistrar; import android.os.Bundle; import android.os.Handler; import android.support.v4.view.PagerAdapter; import android.support.v4.view.ViewPager; import android.support.v4.view.ViewPager.OnPageChangeListener; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.view.animation.Animation.AnimationListener; import android.widget.Button; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.Toast; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.SharedPreferences.Editor; import android.graphics.Bitmap; import android.graphics.Bitmap.Config; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; public class WelcomeActivity extends Activity implements StrUIDataListener, 
OnPageChangeListener, OnClickListener { private final int GETTOKEN = 0; //获取Token private final int BABY = 1; //获取商品 private View view; private Context context; private ViewPager viewPager; private PagerAdapter pagerAdapter; private Button startButton,enter; private LinearLayout indicatorLayout; private ArrayList<View> views; private ImageView[] indicators = null; private int[] images; private AppContext appContext; private StrVolleyInterface networkHelper; private Animation animation; private int net_state = -1; boolean finish_a = false; Intent intent; private String nextBck; //传到首页暂时充当背景图片的地址,根据这个地址可以再缓存中找到图片 @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); view = View.inflate(this, R.layout.activity_welcome, null); setContentView(view); context = this; appContext = (AppContext) getApplication(); networkHelper = new StrVolleyInterface(this); networkHelper.setStrUIDataListener(this); //友盟启动 MobclickAgent.updateOnlineConfig( this ); //加密处理 AnalyticsConfig.enableEncrypt(true); MobclickAgent.setDebugMode( true ); //开启推送服务 PushAgent mPushAgent = PushAgent.getInstance(getApplicationContext()); mPushAgent.enable(); //统计应用启动次数 PushAgent.getInstance(getApplicationContext()).onAppStart(); //获取测试设备的Device Token。 String device_token = UmengRegistrar.getRegistrationId(this); System.out.println(device_token+"设备"); images = new int[] { R.drawable.welcome_01, R.drawable.welcome2, }; initView(); initAnimation(); } //友盟统计 @Override protected void onResume() { super.onResume(); MobclickAgent.onResume(this); } public void onPause() { super.onPause(); MobclickAgent.onPause(this); } private void initView() { net_state = GETTOKEN; //请求token if (appContext.isNetworkConnected()==false) { Toast.makeText(WelcomeActivity.this, "请检查网络是否连接", Toast.LENGTH_LONG).show(); } try { appContext.getToken(WelcomeActivity.this, "client_credentials", "by559a8de1c325c1", "aedd16f80c192661016eebe3ac35a6e7",networkHelper); } catch (Exception e) { // 
Toast.makeText(WelcomeActivity.this, "访问异常" + e, // Toast.LENGTH_LONG).show(); e.printStackTrace(); Log.v("获取token异常",e+"" ); } viewPager = (ViewPager) view.findViewById(R.id.viewpage); startButton = (Button) view.findViewById(R.id.start_Button); startButton.setOnClickListener(WelcomeActivity.this); enter=(Button)view.findViewById(R.id.enter_app); enter.setOnClickListener(WelcomeActivity.this); indicatorLayout = (LinearLayout) view.findViewById(R.id.indicator); views = new ArrayList<View>(); indicators = new ImageView[images.length]; for (int i = 0; i < images.length; i++) { ImageView imageView = new ImageView(context); imageView.setBackgroundResource(images[i]); views.add(imageView); indicators[i] = new ImageView(context); indicators[i].setBackgroundResource(R.drawable.indicators_default); if (i == 0) { indicators[i].setBackgroundResource(R.drawable.indicators_now); } indicatorLayout.addView(indicators[i]); } pagerAdapter = new BasePagerAdapter(views); viewPager.setAdapter(pagerAdapter); viewPager.setOnPageChangeListener(WelcomeActivity.this); } private void initAnimation() { animation = AnimationUtils.loadAnimation(this, R.anim.alpha); view.startAnimation(animation); animation.setAnimationListener(new AnimationListener() { @Override public void onAnimationStart(Animation arg0) { } @Override public void onAnimationRepeat(Animation arg0) { } @Override public void onAnimationEnd(Animation arg0) { new Handler().postDelayed(new Runnable() { @Override public void run() { /*if (first) { intent = new Intent(AppStartActivity.this, WelcomeActivity.class); } else { intent = new Intent(AppStartActivity.this, HomePageActivity.class); }*/ if(finish_a == false){ finish_a = true; intent = new Intent(WelcomeActivity.this, HomePageActivity.class); intent.putExtra("nextBck", nextBck); startActivity(intent); overridePendingTransition(R.anim.s_in_from_right, R.anim.s_out_to_left); WelcomeActivity.this.finish(); } } }, 650); } }); } @Override public void onClick(View v) { if 
(v.getId() == R.id.start_Button) { new SharedConfig(this); SharedPreferences shared = SharedConfig.GetConfig(); Editor editor = shared.edit(); editor.putBoolean("First", false); editor.commit(); if(finish_a == false){ finish_a = true; intent = new Intent(WelcomeActivity.this, HomePageActivity.class); intent.putExtra("nextBck", nextBck); startActivity(intent); overridePendingTransition(R.anim.s_in_from_right, R.anim.s_out_to_left); WelcomeActivity.this.finish(); } } if (v.getId() == R.id.enter_app) { new SharedConfig(this); SharedPreferences shared = SharedConfig.GetConfig(); Editor editor = shared.edit(); editor.putBoolean("First", false); editor.commit(); if(finish_a == false){ finish_a = true; intent = new Intent(WelcomeActivity.this, HomePageActivity.class); intent.putExtra("nextBck", nextBck); startActivity(intent); overridePendingTransition(R.anim.s_in_from_right, R.anim.s_out_to_left); WelcomeActivity.this.finish(); } } } @Override public void onPageScrollStateChanged(int arg0) { // TODO Auto-generated method stub } @Override public void onPageScrolled(int arg0, float arg1, int arg2) { // TODO Auto-generated method stub } @Override public void onPageSelected(int arg0) { if (arg0 == indicators.length - 1) { startButton.setVisibility(View.VISIBLE); enter.setVisibility(View.GONE); } else { startButton.setVisibility(View.GONE); enter.setVisibility(View.VISIBLE); } for (int i = 0; i < indicators.length; i++) { indicators[i] .setBackgroundResource(R.drawable.indicators_default); if (arg0 == i) { indicators[arg0].setBackgroundResource(R.drawable.indicators_now); } } } @Override public void onErrorHappened(VolleyError error) { // TODO Auto-generated method stub // Toast.makeText(WelcomeActivity.this, "加载失败" + error, Toast.LENGTH_LONG) // .show(); this.startService(new Intent(this,TokenIntentService.class)); AppContext.setTokenSuccess(false); net_state = -1; Log.v("获取token",error.toString() ); } @Override public void onDataChanged(String data) { // TODO Auto-generated 
method stub String access_token = null; JSONObject jsonObject = null; int code = -1; try { jsonObject=new JSONObject(data); code = jsonObject.getInt("code"); } catch (JSONException e1) { e1.printStackTrace(); } if(net_state == GETTOKEN){ if (code == 0) { try { JSONObject tempdata=(JSONObject) jsonObject.get("data"); access_token = tempdata.getString("access_token"); Log.v("获取token",access_token+"1"); AppContext.setAccess_token(access_token); AppContext.setTokenSuccess(true); this.startService(new Intent(this,TokenIntentService.class)); } catch (JSONException e) { e.printStackTrace(); } initData(); //现提前加载几个商品,免得进入主页加载过慢 /*Toast.makeText(WelcomeActivity.this, "获取token成功:" + access_token, Toast.LENGTH_LONG) .show();*/ } else { net_state = -1; AppContext.setTokenSuccess(false); this.startService(new Intent(this,TokenIntentService.class)); // Toast.makeText(WelcomeActivity.this, "获取token失败:code=" + code, Toast.LENGTH_LONG) // .show(); } }else if(net_state == BABY){ Gson gson = new Gson(); String producData; try { final BitmapCacheHomageImage bitmapCache = new BitmapCacheHomageImage(); producData = jsonObject.getString("data"); JSONObject jsondata = new JSONObject(producData); String producList = jsondata.getString("list"); List<ProductItem> productlist = gson.fromJson(producList, new TypeToken<List<ProductItem>>() { }.getType()); if (productlist != null) { nextBck = productlist.get(0).getImg_post_bg(); for(int k=0; k<productlist.size(); k++){ final String url_bg = productlist.get(k).getImg_post_bg(); ImageRequest imageRequest_bg = new ImageRequest( url_bg, new Response.Listener<Bitmap>() { @Override public void onResponse(Bitmap response) { bitmapCache.putBitmap(url_bg, response); } }, 0, 0, Config.RGB_565, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { // imageView.setImageResource(R.drawable.default_image); } }); imageRequest_bg.setTag("imageRequest_bg"); AppContext.getHttpQueues().add(imageRequest_bg); 
AppContext.getHttpQueues().start(); final String url_img = productlist.get(k).getImg_post(); ImageRequest imageRequest_img = new ImageRequest( url_img, new Response.Listener<Bitmap>() { @Override public void onResponse(Bitmap response) { bitmapCache.putBitmap(url_img, response); } }, 0, 0, Config.RGB_565, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { // imageView.setImageResource(R.drawable.default_image); } }); imageRequest_img.setTag("imageRequest_img"); AppContext.getHttpQueues().add(imageRequest_img); AppContext.getHttpQueues().start(); } } } catch (JSONException e) { e.printStackTrace(); } } } /** * 加载商品列表数据 */ private void initData() { net_state = BABY; try { boolean YesOrNo = appContext.getProductList(WelcomeActivity.this, 1, 4, networkHelper); if (!YesOrNo) { // 如果没联网 Toast.makeText(WelcomeActivity.this, "请检查网络连接", Toast.LENGTH_SHORT) .show(); net_state = -1; } } catch (Exception e) { e.printStackTrace(); net_state = -1; } } }
src/com/itboye/banma/welcome/WelcomeActivity.java
package com.itboye.banma.welcome; import java.util.ArrayList; import java.util.List; import org.json.JSONException; import org.json.JSONObject; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.ImageRequest; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.itboye.banma.R; import com.itboye.banma.activities.HomePageActivity; import com.itboye.banma.api.StrUIDataListener; import com.itboye.banma.api.StrVolleyInterface; import com.itboye.banma.app.AppContext; import com.itboye.banma.app.Constant; import com.itboye.banma.entity.ProductItem; import com.itboye.banma.service.TokenIntentService; import com.itboye.banma.utils.BitmapCache; import com.itboye.banma.utils.BitmapCacheHomageImage; import com.itboye.banma.utils.SharedConfig; import com.umeng.analytics.AnalyticsConfig; import com.umeng.analytics.MobclickAgent; import com.umeng.message.PushAgent; import com.umeng.message.UmengRegistrar; import android.os.Bundle; import android.os.Handler; import android.support.v4.view.PagerAdapter; import android.support.v4.view.ViewPager; import android.support.v4.view.ViewPager.OnPageChangeListener; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.view.animation.Animation.AnimationListener; import android.widget.Button; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.Toast; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.SharedPreferences.Editor; import android.graphics.Bitmap; import android.graphics.Bitmap.Config; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; public class WelcomeActivity extends Activity implements StrUIDataListener, 
OnPageChangeListener, OnClickListener { private final int GETTOKEN = 0; //获取Token private final int BABY = 1; //获取商品 private View view; private Context context; private ViewPager viewPager; private PagerAdapter pagerAdapter; private Button startButton,enter; private LinearLayout indicatorLayout; private ArrayList<View> views; private ImageView[] indicators = null; private int[] images; private AppContext appContext; private StrVolleyInterface networkHelper; private Animation animation; private int net_state = -1; Intent intent; private String nextBck; //传到首页暂时充当背景图片的地址,根据这个地址可以再缓存中找到图片 @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); view = View.inflate(this, R.layout.activity_welcome, null); setContentView(view); context = this; appContext = (AppContext) getApplication(); networkHelper = new StrVolleyInterface(this); networkHelper.setStrUIDataListener(this); //友盟启动 MobclickAgent.updateOnlineConfig( this ); //加密处理 AnalyticsConfig.enableEncrypt(true); MobclickAgent.setDebugMode( true ); //开启推送服务 PushAgent mPushAgent = PushAgent.getInstance(getApplicationContext()); mPushAgent.enable(); //统计应用启动次数 PushAgent.getInstance(getApplicationContext()).onAppStart(); //获取测试设备的Device Token。 String device_token = UmengRegistrar.getRegistrationId(this); System.out.println(device_token+"设备"); images = new int[] { R.drawable.welcome_01, R.drawable.welcome2, }; initView(); initAnimation(); } //友盟统计 @Override protected void onResume() { super.onResume(); MobclickAgent.onResume(this); } public void onPause() { super.onPause(); MobclickAgent.onPause(this); } private void initView() { net_state = GETTOKEN; //请求token if (appContext.isNetworkConnected()==false) { Toast.makeText(WelcomeActivity.this, "请检查网络是否连接", Toast.LENGTH_LONG).show(); } try { appContext.getToken(WelcomeActivity.this, "client_credentials", "by559a8de1c325c1", "aedd16f80c192661016eebe3ac35a6e7",networkHelper); } catch (Exception e) { // 
Toast.makeText(WelcomeActivity.this, "访问异常" + e, // Toast.LENGTH_LONG).show(); e.printStackTrace(); Log.v("获取token异常",e+"" ); } viewPager = (ViewPager) view.findViewById(R.id.viewpage); startButton = (Button) view.findViewById(R.id.start_Button); startButton.setOnClickListener(WelcomeActivity.this); enter=(Button)view.findViewById(R.id.enter_app); enter.setOnClickListener(WelcomeActivity.this); indicatorLayout = (LinearLayout) view.findViewById(R.id.indicator); views = new ArrayList<View>(); indicators = new ImageView[images.length]; for (int i = 0; i < images.length; i++) { ImageView imageView = new ImageView(context); imageView.setBackgroundResource(images[i]); views.add(imageView); indicators[i] = new ImageView(context); indicators[i].setBackgroundResource(R.drawable.indicators_default); if (i == 0) { indicators[i].setBackgroundResource(R.drawable.indicators_now); } indicatorLayout.addView(indicators[i]); } pagerAdapter = new BasePagerAdapter(views); viewPager.setAdapter(pagerAdapter); viewPager.setOnPageChangeListener(WelcomeActivity.this); } private void initAnimation() { animation = AnimationUtils.loadAnimation(this, R.anim.alpha); view.startAnimation(animation); animation.setAnimationListener(new AnimationListener() { @Override public void onAnimationStart(Animation arg0) { } @Override public void onAnimationRepeat(Animation arg0) { } @Override public void onAnimationEnd(Animation arg0) { new Handler().postDelayed(new Runnable() { @Override public void run() { /*if (first) { intent = new Intent(AppStartActivity.this, WelcomeActivity.class); } else { intent = new Intent(AppStartActivity.this, HomePageActivity.class); }*/ intent = new Intent(WelcomeActivity.this, HomePageActivity.class); intent.putExtra("nextBck", nextBck); startActivity(intent); overridePendingTransition(R.anim.s_in_from_right, R.anim.s_out_to_left); WelcomeActivity.this.finish(); } }, 650); } }); } @Override public void onClick(View v) { if (v.getId() == R.id.start_Button) { new 
SharedConfig(this); SharedPreferences shared = SharedConfig.GetConfig(); Editor editor = shared.edit(); editor.putBoolean("First", false); editor.commit(); intent = new Intent(WelcomeActivity.this, HomePageActivity.class); intent.putExtra("nextBck", nextBck); startActivity(intent); overridePendingTransition(R.anim.s_in_from_right, R.anim.s_out_to_left); WelcomeActivity.this.finish(); } if (v.getId() == R.id.enter_app) { new SharedConfig(this); SharedPreferences shared = SharedConfig.GetConfig(); Editor editor = shared.edit(); editor.putBoolean("First", false); editor.commit(); intent = new Intent(WelcomeActivity.this, HomePageActivity.class); intent.putExtra("nextBck", nextBck); startActivity(intent); overridePendingTransition(R.anim.s_in_from_right, R.anim.s_out_to_left); WelcomeActivity.this.finish(); } } @Override public void onPageScrollStateChanged(int arg0) { // TODO Auto-generated method stub } @Override public void onPageScrolled(int arg0, float arg1, int arg2) { // TODO Auto-generated method stub } @Override public void onPageSelected(int arg0) { if (arg0 == indicators.length - 1) { startButton.setVisibility(View.VISIBLE); enter.setVisibility(View.GONE); } else { startButton.setVisibility(View.GONE); enter.setVisibility(View.VISIBLE); } for (int i = 0; i < indicators.length; i++) { indicators[i] .setBackgroundResource(R.drawable.indicators_default); if (arg0 == i) { indicators[arg0].setBackgroundResource(R.drawable.indicators_now); } } } @Override public void onErrorHappened(VolleyError error) { // TODO Auto-generated method stub // Toast.makeText(WelcomeActivity.this, "加载失败" + error, Toast.LENGTH_LONG) // .show(); this.startService(new Intent(this,TokenIntentService.class)); AppContext.setTokenSuccess(false); net_state = -1; Log.v("获取token",error.toString() ); } @Override public void onDataChanged(String data) { // TODO Auto-generated method stub String access_token = null; JSONObject jsonObject = null; int code = -1; try { jsonObject=new 
JSONObject(data); code = jsonObject.getInt("code"); } catch (JSONException e1) { e1.printStackTrace(); } if(net_state == GETTOKEN){ if (code == 0) { try { JSONObject tempdata=(JSONObject) jsonObject.get("data"); access_token = tempdata.getString("access_token"); Log.v("获取token",access_token+"1"); AppContext.setAccess_token(access_token); AppContext.setTokenSuccess(true); this.startService(new Intent(this,TokenIntentService.class)); } catch (JSONException e) { e.printStackTrace(); } initData(); //现提前加载几个商品,免得进入主页加载过慢 /*Toast.makeText(WelcomeActivity.this, "获取token成功:" + access_token, Toast.LENGTH_LONG) .show();*/ } else { net_state = -1; AppContext.setTokenSuccess(false); this.startService(new Intent(this,TokenIntentService.class)); // Toast.makeText(WelcomeActivity.this, "获取token失败:code=" + code, Toast.LENGTH_LONG) // .show(); } }else if(net_state == BABY){ Gson gson = new Gson(); String producData; try { final BitmapCacheHomageImage bitmapCache = new BitmapCacheHomageImage(); producData = jsonObject.getString("data"); JSONObject jsondata = new JSONObject(producData); String producList = jsondata.getString("list"); List<ProductItem> productlist = gson.fromJson(producList, new TypeToken<List<ProductItem>>() { }.getType()); if (productlist != null) { nextBck = productlist.get(0).getImg_post_bg(); for(int k=0; k<productlist.size(); k++){ final String url_bg = productlist.get(k).getImg_post_bg(); ImageRequest imageRequest_bg = new ImageRequest( url_bg, new Response.Listener<Bitmap>() { @Override public void onResponse(Bitmap response) { bitmapCache.putBitmap(url_bg, response); } }, 0, 0, Config.RGB_565, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { // imageView.setImageResource(R.drawable.default_image); } }); imageRequest_bg.setTag("imageRequest_bg"); AppContext.getHttpQueues().add(imageRequest_bg); AppContext.getHttpQueues().start(); final String url_img = productlist.get(k).getImg_post(); ImageRequest imageRequest_img = 
new ImageRequest( url_img, new Response.Listener<Bitmap>() { @Override public void onResponse(Bitmap response) { bitmapCache.putBitmap(url_img, response); } }, 0, 0, Config.RGB_565, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { // imageView.setImageResource(R.drawable.default_image); } }); imageRequest_img.setTag("imageRequest_img"); AppContext.getHttpQueues().add(imageRequest_img); AppContext.getHttpQueues().start(); } } } catch (JSONException e) { e.printStackTrace(); } } } /** * 加载商品列表数据 */ private void initData() { net_state = BABY; try { boolean YesOrNo = appContext.getProductList(WelcomeActivity.this, 1, 4, networkHelper); if (!YesOrNo) { // 如果没联网 Toast.makeText(WelcomeActivity.this, "请检查网络连接", Toast.LENGTH_SHORT) .show(); net_state = -1; } } catch (Exception e) { e.printStackTrace(); net_state = -1; } } }
首页启动加个判定
src/com/itboye/banma/welcome/WelcomeActivity.java
首页启动加个判定
Java
apache-2.0
974fe98cf39b36e294ca7f9de154b77d22e910e2
0
ctripcorp/x-pipe,ctripcorp/x-pipe,ctripcorp/x-pipe,ctripcorp/x-pipe,ctripcorp/x-pipe,ctripcorp/x-pipe
package com.ctrip.xpipe.redis.keeper.impl; import com.ctrip.xpipe.api.endpoint.Endpoint; import com.ctrip.xpipe.api.lifecycle.Releasable; import com.ctrip.xpipe.api.observer.Observer; import com.ctrip.xpipe.api.server.PARTIAL_STATE; import com.ctrip.xpipe.concurrent.AbstractExceptionLogTask; import com.ctrip.xpipe.netty.filechannel.ReferenceFileRegion; import com.ctrip.xpipe.redis.core.protocal.CAPA; import com.ctrip.xpipe.redis.core.protocal.cmd.DefaultPsync; import com.ctrip.xpipe.redis.core.protocal.protocal.EofType; import com.ctrip.xpipe.redis.core.protocal.protocal.SimpleStringParser; import com.ctrip.xpipe.redis.core.redis.operation.RedisOp; import com.ctrip.xpipe.redis.core.redis.operation.RedisOpType; import com.ctrip.xpipe.redis.core.store.*; import com.ctrip.xpipe.redis.keeper.RedisClient; import com.ctrip.xpipe.redis.keeper.RedisKeeperServer; import com.ctrip.xpipe.redis.keeper.RedisSlave; import com.ctrip.xpipe.redis.keeper.SLAVE_STATE; import com.ctrip.xpipe.redis.keeper.exception.RedisKeeperRuntimeException; import com.ctrip.xpipe.utils.*; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; import io.netty.buffer.ByteBuf; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.InetSocketAddress; import java.util.Set; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; /** * @author wenchao.meng * * May 20, 2016 4:34:09 PM */ public class DefaultRedisSlave implements RedisSlave { private final static Logger logger = LoggerFactory.getLogger(DefaultRedisSlave.class); public static final String KEY_RDB_DUMP_MAX_WAIT_MILLI = "rdbDumpMaxWaitMilli"; private Long replAckOff; private Long replAckTime = System.currentTimeMillis(); private SLAVE_STATE slaveState; 
private PARTIAL_STATE partialState = PARTIAL_STATE.UNKNOWN; private Long rdbFileOffset; private ReplicationProgress<?> progressAfterRdb; private EofType eofType; private ScheduledExecutorService scheduled; private ScheduledFuture<?> pingFuture, waitTimeoutFuture; private static final int pingIntervalMilli = 1000; private int rdbDumpMaxWaitMilli = Integer.parseInt(System.getProperty(KEY_RDB_DUMP_MAX_WAIT_MILLI, "1800000"));//half an hour private int waitForPsyncProcessedTimeoutMilli = 10000; private volatile boolean putOnLineOnAck = false; private ExecutorService psyncExecutor; private RedisClient<RedisKeeperServer> redisClient; private AtomicBoolean writingCommands = new AtomicBoolean(false); private ChannelFutureListener writeExceptionListener = new ChannelFutureListener() { private AtomicLong atomicLong = new AtomicLong(0); @Override public void operationComplete(ChannelFuture future) throws Exception { if(!future.isSuccess()){ long failCount = atomicLong.incrementAndGet(); //avoid write too much error msg if((failCount & (failCount -1)) == 0){ getLogger().error("[operationComplete][write fail]" +failCount + "," + DefaultRedisSlave.this, future.cause()); } } } }; private CloseState closeState = new CloseState(); private SettableFuture<Boolean> psyncProcessed = SettableFuture.create(); public DefaultRedisSlave(RedisClient<RedisKeeperServer> redisClient){ this.redisClient = redisClient; this.setSlaveListeningPort(redisClient.getSlaveListeningPort()); this.redisClient.addChannelCloseReleaseResources(this); initExecutor(((DefaultRedisClient)redisClient).channel); } private void initExecutor(Channel channel) { String threadPrefix = buildThreadPrefix(channel); ClusterId clusterId = redisClient.getRedisServer().getClusterId(); ShardId shardId = redisClient.getRedisServer().getShardId(); psyncExecutor = Executors.newSingleThreadExecutor(ClusterShardAwareThreadFactory.create(clusterId, shardId, threadPrefix)); scheduled = Executors.newScheduledThreadPool(1, 
ClusterShardAwareThreadFactory.create(clusterId, shardId, threadPrefix)); } protected String buildThreadPrefix(Channel channel) { String getRemoteIpLocalPort = ChannelUtil.getRemoteAddr(channel); return "RedisClientPsync-" + getRemoteIpLocalPort; } @Override public void waitForRdbDumping() { if(this.slaveState == SLAVE_STATE.REDIS_REPL_WAIT_RDB_DUMPING){ getLogger().info("[waitForRdbDumping][already waiting]{}", this); return; } this.slaveState = SLAVE_STATE.REDIS_REPL_WAIT_RDB_DUMPING; this.waitForRdb(); } @Override public void waitForGtidParse() { if(this.slaveState == SLAVE_STATE.REDIS_REPL_WAIT_RDB_GTIDSET){ getLogger().info("[waitForGtidParse][already waiting]{}", this); return; } this.slaveState = SLAVE_STATE.REDIS_REPL_WAIT_RDB_GTIDSET; if (null == pingFuture || pingFuture.isDone()) { waitForRdb(); } else { getLogger().info("[waitForGtidParse][already start wait]{}", this); } } private void waitForRdb() { getLogger().info("[waitForRdb][begin ping]{}", this); pingFuture = scheduled.scheduleAtFixedRate(new Runnable() { @Override public void run() { try{ sendMessage("\n".getBytes()); }catch(Exception e){ getLogger().error("[run][sendPing]" + redisClient, e); } } }, pingIntervalMilli, pingIntervalMilli, TimeUnit.MILLISECONDS); waitTimeoutFuture = scheduled.schedule(new AbstractExceptionLogTask() { @Override protected void doRun() throws IOException { getLogger().info("[waitForRdb][timeout][close slave]{}", DefaultRedisSlave.this); close(); } }, rdbDumpMaxWaitMilli, TimeUnit.MILLISECONDS); } @Override public SLAVE_STATE getSlaveState() { return this.slaveState; } @Override public void ack(Long ackOff) { if(getLogger().isDebugEnabled()){ getLogger().debug("[ack]{}, {}", this , ackOff); } if(putOnLineOnAck){ putOnLineOnAck = false; getLogger().info("[ack][put slave online]{}", this); sendCommandForFullSync(); } this.replAckOff = ackOff; this.replAckTime = System.currentTimeMillis(); } @Override public ChannelFuture writeFile(ReferenceFileRegion referenceFileRegion) 
{ return doWriteFile(referenceFileRegion); } private ChannelFuture doWriteFile(ReferenceFileRegion referenceFileRegion) { closeState.makeSureNotClosed(); ChannelFuture future = channel().writeAndFlush(referenceFileRegion); future.addListener(writeExceptionListener); return future; } @Override public Long processedOffset() { return getAck(); } @Override public Long getAck() { return this.replAckOff; } @Override public Long getAckTime() { return this.replAckTime; } protected String buildMarkBeforeFsync(ReplicationProgress<?> rdbProgress) { return StringUtil.join(" ", DefaultPsync.FULL_SYNC, getRedisServer().getKeeperRepl().replId(), rdbProgress.getProgress().toString()); } @Override public void beginWriteRdb(EofType eofType, ReplicationProgress<?> rdbProgress) { getLogger().info("[beginWriteRdb]{}, {}", eofType, rdbProgress); closeState.makeSureOpen(); SimpleStringParser simpleStringParser = new SimpleStringParser(buildMarkBeforeFsync(rdbProgress)); getLogger().info("[setRdbFileInfo]{},{}", simpleStringParser.getPayload(), this); sendMessage(simpleStringParser.format()); if(!eofType.support(getCapas())){ getLogger().warn("[beginWriteRdb][eoftype not supported]{}, {}, {}", this, eofType, getCapas()); } partialState = PARTIAL_STATE.FULL; slaveState = SLAVE_STATE.REDIS_REPL_SEND_BULK; this.eofType = eofType; if (rdbProgress instanceof OffsetReplicationProgress) { this.progressAfterRdb = new OffsetReplicationProgress(((OffsetReplicationProgress) rdbProgress).getProgress() + 1); } else { this.progressAfterRdb = rdbProgress; } putOnLineOnAck = eofType.putOnLineOnAck(); cancelWaitRdb(); channel().writeAndFlush(eofType.getStart()); } @Override public void rdbWriteComplete() { getLogger().info("[rdbWriteComplete]{}", this); ByteBuf end = eofType.getEnd(); if(end != null){ channel().writeAndFlush(end); } if(slaveState == SLAVE_STATE.REDIS_REPL_SEND_BULK){ if(getLogger().isInfoEnabled()){ getLogger().info("[writeComplete][rdbWriteComplete]" + this); } } this.slaveState = 
SLAVE_STATE.REDIS_REPL_ONLINE; if(!putOnLineOnAck){ sendCommandForFullSync(); } } private void cancelWaitRdb() { if(pingFuture != null){ getLogger().info("[cancelWaitRdb][cancel ping]{}", this); pingFuture.cancel(true); } if(waitTimeoutFuture != null){ getLogger().info("[cancelWaitRdb][cancel wait dump rdb]{}", this); waitTimeoutFuture.cancel(true); } } @Override public void beginWriteCommands(ReplicationProgress<?> progress) { closeState.makeSureOpen(); try { if (writingCommands.compareAndSet(false, true)) { if(partialState == PARTIAL_STATE.UNKNOWN){ partialState = PARTIAL_STATE.PARTIAL; } getLogger().info("[beginWriteCommands]{}, {}", this, progress); slaveState = SLAVE_STATE.REDIS_REPL_ONLINE; getRedisServer().getReplicationStore().addCommandsListener(progress, this); } else { getLogger().warn("[beginWriteCommands][already writing]{}, {}", this, progress); } } catch (IOException e) { throw new RedisKeeperRuntimeException("[beginWriteCommands]" + progress + "," + this, e); } } protected void sendCommandForFullSync() { getLogger().info("[sendCommandForFullSync]{}, {}", this, progressAfterRdb); processPsyncSequentially(new AbstractExceptionLogTask() { @Override protected void doRun() throws Exception { try { beginWriteCommands(progressAfterRdb); } catch (Throwable th) { getLogger().error("[sendCommandForFullSync][failed]", th); if (DefaultRedisSlave.this.isOpen()) { getLogger().error("[sendCommandForFullSync] close slave"); DefaultRedisSlave.this.close(); } } } }); } @Override public ChannelFuture onCommand(CommandFile currentFile, long filePosition, Object cmd) { closeState.makeSureOpen(); getLogger().debug("[onCommand]{}, {}", this, cmd); Object command = cmd; if (cmd instanceof RedisOp) { if (shouldFilter((RedisOp) cmd)) { return null; } command = ((RedisOp) cmd).buildRESP(); } ChannelFuture future = channel().writeAndFlush(command); future.addListener(writeExceptionListener); return future; } private boolean shouldFilter(RedisOp redisOp) { if 
(RedisOpType.PUBLISH.equals(redisOp.getOpType())) { String channel = new String(redisOp.buildRawOpArgs()[4]); if (!channel.startsWith("xpipe-hetero-")) { logger.debug("publish channel: [{}] filtered", channel); return true; } } return false; } @Override public String info() { String info = ""; long lag = System.currentTimeMillis() - replAckTime; info = String.format( "ip=%s,port=%d,state=%s,offset=%d,lag=%d,remotePort=%d" , getClientIpAddress() == null ? ip() : getClientIpAddress(), getSlaveListeningPort(), slaveState != null ? slaveState.getDesc() : "null", replAckOff, lag/1000, remotePort()); return info; } @Override public String ip() { return redisClient.ip(); } @Override public PARTIAL_STATE partialState() { return partialState; } @Override public void partialSync() { partialState = PARTIAL_STATE.PARTIAL; } @Override public void processPsyncSequentially(Runnable runnable) { closeState.makeSureNotClosed(); psyncExecutor.execute(runnable); } @Override public void markPsyncProcessed() { getLogger().info("[markPsyncProcessed]{}", this); psyncProcessed.set(true); } @Override public String metaInfo() { return String.format("%s(%s:%d)", roleDesc(), ip(), getSlaveListeningPort()); } @Override public boolean supportProgress(Class<? extends ReplicationProgress<?>> clazz) { return clazz.equals(OffsetReplicationProgress.class); } private int remotePort() { Channel channel = channel(); return channel == null? 
0: ((InetSocketAddress)channel.remoteAddress()).getPort(); } @Override public boolean isOpen() { return closeState.isOpen(); } @Override public void close() { close(0); } @VisibleForTesting /** * testSleepMilli is for test */ protected void close(int testSleepMilli) { getLogger().info("[close]{}", this); if(closeState.isClosed()){ getLogger().info("[close][already closed]{}", this); return; } closeState.setClosing(); psyncProcessed.addListener(new AbstractExceptionLogTask() { @Override protected void doRun() throws Exception { doRealClose(); } }, MoreExecutors.directExecutor()); synchronized (closeState) { if (closeState.isClosing()) { //for unit test if (testSleepMilli > 0) { try { TimeUnit.MILLISECONDS.sleep(testSleepMilli); } catch (InterruptedException e) { } } scheduled.schedule(new AbstractExceptionLogTask() { @Override protected void doRun() throws Exception { getLogger().info("[wait for psync processed timeout close slave]{}", DefaultRedisSlave.this); doRealClose(); } }, waitForPsyncProcessedTimeoutMilli, TimeUnit.MILLISECONDS); } } } protected void doRealClose() throws IOException { synchronized (closeState) { getLogger().info("[doRealClose]{}", this); closeState.setClosed(); redisClient.close(); psyncExecutor.shutdownNow(); scheduled.shutdownNow(); } } @Override public void beforeCommand() { } // delegate methods start public void addObserver(Observer observer) { redisClient.addObserver(observer); } public void removeObserver(Observer observer) { redisClient.removeObserver(observer); } public RedisSlave becomeSlave() { return redisClient.becomeSlave(); } @Override public RedisSlave becomeXSlave() { return redisClient.becomeSlave(); } public RedisKeeperServer getRedisServer() { return redisClient.getRedisServer(); } public void setSlaveListeningPort(int port) { redisClient.setSlaveListeningPort(port); } public int getSlaveListeningPort() { return redisClient.getSlaveListeningPort(); } @Override public void setClientIpAddress(String host) { 
redisClient.setClientIpAddress(host); } @Override public String getClientIpAddress() { return redisClient.getClientIpAddress(); } public void capa(CAPA capa) { redisClient.capa(capa); } @Override public Set<CAPA> getCapas() { return redisClient.getCapas(); } public String[] readCommands(ByteBuf byteBuf) { return redisClient.readCommands(byteBuf); } public Channel channel() { return redisClient.channel(); } public void sendMessage(ByteBuf byteBuf) { closeState.makeSureNotClosed(); redisClient.sendMessage(byteBuf); } public void sendMessage(byte[] bytes) { closeState.makeSureNotClosed(); redisClient.sendMessage(bytes); } public void addChannelCloseReleaseResources(Releasable releasable) { redisClient.addChannelCloseReleaseResources(releasable); } protected Logger getLogger() { return logger; } @Override public void setClientEndpoint(Endpoint endpoint) { redisClient.setClientEndpoint(endpoint); } @Override public Endpoint getClientEndpoint() { return redisClient.getClientEndpoint(); } @Override public String toString() { return this.redisClient.toString(); } @Override public void release() throws Exception { getLogger().info("[release]{}", this); close(); } @Override public boolean capaOf(CAPA capa) { return redisClient.capaOf(capa); } @Override public boolean isKeeper() { return redisClient.isKeeper(); } @Override public void setKeeper() { redisClient.setKeeper(); } @VisibleForTesting protected void setRdbDumpMaxWaitMilli(int rdbDumpMaxWaitMilli) { this.rdbDumpMaxWaitMilli = rdbDumpMaxWaitMilli; } @VisibleForTesting protected void setWaitForPsyncProcessedTimeoutMilli(int waitForPsyncProcessedTimeoutMilli) { this.waitForPsyncProcessedTimeoutMilli = waitForPsyncProcessedTimeoutMilli; } @VisibleForTesting protected CloseState getCloseState() { return closeState; } }
redis/redis-keeper/src/main/java/com/ctrip/xpipe/redis/keeper/impl/DefaultRedisSlave.java
package com.ctrip.xpipe.redis.keeper.impl; import com.ctrip.xpipe.api.endpoint.Endpoint; import com.ctrip.xpipe.api.lifecycle.Releasable; import com.ctrip.xpipe.api.observer.Observer; import com.ctrip.xpipe.api.server.PARTIAL_STATE; import com.ctrip.xpipe.concurrent.AbstractExceptionLogTask; import com.ctrip.xpipe.netty.filechannel.ReferenceFileRegion; import com.ctrip.xpipe.redis.core.protocal.CAPA; import com.ctrip.xpipe.redis.core.protocal.cmd.DefaultPsync; import com.ctrip.xpipe.redis.core.protocal.protocal.EofType; import com.ctrip.xpipe.redis.core.protocal.protocal.SimpleStringParser; import com.ctrip.xpipe.redis.core.redis.operation.RedisOp; import com.ctrip.xpipe.redis.core.store.*; import com.ctrip.xpipe.redis.keeper.RedisClient; import com.ctrip.xpipe.redis.keeper.RedisKeeperServer; import com.ctrip.xpipe.redis.keeper.RedisSlave; import com.ctrip.xpipe.redis.keeper.SLAVE_STATE; import com.ctrip.xpipe.redis.keeper.exception.RedisKeeperRuntimeException; import com.ctrip.xpipe.utils.*; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.SettableFuture; import io.netty.buffer.ByteBuf; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.InetSocketAddress; import java.util.Set; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; /** * @author wenchao.meng * * May 20, 2016 4:34:09 PM */ public class DefaultRedisSlave implements RedisSlave { private final static Logger logger = LoggerFactory.getLogger(DefaultRedisSlave.class); public static final String KEY_RDB_DUMP_MAX_WAIT_MILLI = "rdbDumpMaxWaitMilli"; private Long replAckOff; private Long replAckTime = System.currentTimeMillis(); private SLAVE_STATE slaveState; private PARTIAL_STATE partialState = PARTIAL_STATE.UNKNOWN; 
private Long rdbFileOffset; private ReplicationProgress<?> progressAfterRdb; private EofType eofType; private ScheduledExecutorService scheduled; private ScheduledFuture<?> pingFuture, waitTimeoutFuture; private static final int pingIntervalMilli = 1000; private int rdbDumpMaxWaitMilli = Integer.parseInt(System.getProperty(KEY_RDB_DUMP_MAX_WAIT_MILLI, "1800000"));//half an hour private int waitForPsyncProcessedTimeoutMilli = 10000; private volatile boolean putOnLineOnAck = false; private ExecutorService psyncExecutor; private RedisClient<RedisKeeperServer> redisClient; private AtomicBoolean writingCommands = new AtomicBoolean(false); private ChannelFutureListener writeExceptionListener = new ChannelFutureListener() { private AtomicLong atomicLong = new AtomicLong(0); @Override public void operationComplete(ChannelFuture future) throws Exception { if(!future.isSuccess()){ long failCount = atomicLong.incrementAndGet(); //avoid write too much error msg if((failCount & (failCount -1)) == 0){ getLogger().error("[operationComplete][write fail]" +failCount + "," + DefaultRedisSlave.this, future.cause()); } } } }; private CloseState closeState = new CloseState(); private SettableFuture<Boolean> psyncProcessed = SettableFuture.create(); public DefaultRedisSlave(RedisClient<RedisKeeperServer> redisClient){ this.redisClient = redisClient; this.setSlaveListeningPort(redisClient.getSlaveListeningPort()); this.redisClient.addChannelCloseReleaseResources(this); initExecutor(((DefaultRedisClient)redisClient).channel); } private void initExecutor(Channel channel) { String threadPrefix = buildThreadPrefix(channel); ClusterId clusterId = redisClient.getRedisServer().getClusterId(); ShardId shardId = redisClient.getRedisServer().getShardId(); psyncExecutor = Executors.newSingleThreadExecutor(ClusterShardAwareThreadFactory.create(clusterId, shardId, threadPrefix)); scheduled = Executors.newScheduledThreadPool(1, ClusterShardAwareThreadFactory.create(clusterId, shardId, threadPrefix)); 
} protected String buildThreadPrefix(Channel channel) { String getRemoteIpLocalPort = ChannelUtil.getRemoteAddr(channel); return "RedisClientPsync-" + getRemoteIpLocalPort; } @Override public void waitForRdbDumping() { if(this.slaveState == SLAVE_STATE.REDIS_REPL_WAIT_RDB_DUMPING){ getLogger().info("[waitForRdbDumping][already waiting]{}", this); return; } this.slaveState = SLAVE_STATE.REDIS_REPL_WAIT_RDB_DUMPING; this.waitForRdb(); } @Override public void waitForGtidParse() { if(this.slaveState == SLAVE_STATE.REDIS_REPL_WAIT_RDB_GTIDSET){ getLogger().info("[waitForGtidParse][already waiting]{}", this); return; } this.slaveState = SLAVE_STATE.REDIS_REPL_WAIT_RDB_GTIDSET; if (null == pingFuture || pingFuture.isDone()) { waitForRdb(); } else { getLogger().info("[waitForGtidParse][already start wait]{}", this); } } private void waitForRdb() { getLogger().info("[waitForRdb][begin ping]{}", this); pingFuture = scheduled.scheduleAtFixedRate(new Runnable() { @Override public void run() { try{ sendMessage("\n".getBytes()); }catch(Exception e){ getLogger().error("[run][sendPing]" + redisClient, e); } } }, pingIntervalMilli, pingIntervalMilli, TimeUnit.MILLISECONDS); waitTimeoutFuture = scheduled.schedule(new AbstractExceptionLogTask() { @Override protected void doRun() throws IOException { getLogger().info("[waitForRdb][timeout][close slave]{}", DefaultRedisSlave.this); close(); } }, rdbDumpMaxWaitMilli, TimeUnit.MILLISECONDS); } @Override public SLAVE_STATE getSlaveState() { return this.slaveState; } @Override public void ack(Long ackOff) { if(getLogger().isDebugEnabled()){ getLogger().debug("[ack]{}, {}", this , ackOff); } if(putOnLineOnAck){ putOnLineOnAck = false; getLogger().info("[ack][put slave online]{}", this); sendCommandForFullSync(); } this.replAckOff = ackOff; this.replAckTime = System.currentTimeMillis(); } @Override public ChannelFuture writeFile(ReferenceFileRegion referenceFileRegion) { return doWriteFile(referenceFileRegion); } private ChannelFuture 
doWriteFile(ReferenceFileRegion referenceFileRegion) { closeState.makeSureNotClosed(); ChannelFuture future = channel().writeAndFlush(referenceFileRegion); future.addListener(writeExceptionListener); return future; } @Override public Long processedOffset() { return getAck(); } @Override public Long getAck() { return this.replAckOff; } @Override public Long getAckTime() { return this.replAckTime; } protected String buildMarkBeforeFsync(ReplicationProgress<?> rdbProgress) { return StringUtil.join(" ", DefaultPsync.FULL_SYNC, getRedisServer().getKeeperRepl().replId(), rdbProgress.getProgress().toString()); } @Override public void beginWriteRdb(EofType eofType, ReplicationProgress<?> rdbProgress) { getLogger().info("[beginWriteRdb]{}, {}", eofType, rdbProgress); closeState.makeSureOpen(); SimpleStringParser simpleStringParser = new SimpleStringParser(buildMarkBeforeFsync(rdbProgress)); getLogger().info("[setRdbFileInfo]{},{}", simpleStringParser.getPayload(), this); sendMessage(simpleStringParser.format()); if(!eofType.support(getCapas())){ getLogger().warn("[beginWriteRdb][eoftype not supported]{}, {}, {}", this, eofType, getCapas()); } partialState = PARTIAL_STATE.FULL; slaveState = SLAVE_STATE.REDIS_REPL_SEND_BULK; this.eofType = eofType; if (rdbProgress instanceof OffsetReplicationProgress) { this.progressAfterRdb = new OffsetReplicationProgress(((OffsetReplicationProgress) rdbProgress).getProgress() + 1); } else { this.progressAfterRdb = rdbProgress; } putOnLineOnAck = eofType.putOnLineOnAck(); cancelWaitRdb(); channel().writeAndFlush(eofType.getStart()); } @Override public void rdbWriteComplete() { getLogger().info("[rdbWriteComplete]{}", this); ByteBuf end = eofType.getEnd(); if(end != null){ channel().writeAndFlush(end); } if(slaveState == SLAVE_STATE.REDIS_REPL_SEND_BULK){ if(getLogger().isInfoEnabled()){ getLogger().info("[writeComplete][rdbWriteComplete]" + this); } } this.slaveState = SLAVE_STATE.REDIS_REPL_ONLINE; if(!putOnLineOnAck){ 
sendCommandForFullSync(); } } private void cancelWaitRdb() { if(pingFuture != null){ getLogger().info("[cancelWaitRdb][cancel ping]{}", this); pingFuture.cancel(true); } if(waitTimeoutFuture != null){ getLogger().info("[cancelWaitRdb][cancel wait dump rdb]{}", this); waitTimeoutFuture.cancel(true); } } @Override public void beginWriteCommands(ReplicationProgress<?> progress) { closeState.makeSureOpen(); try { if (writingCommands.compareAndSet(false, true)) { if(partialState == PARTIAL_STATE.UNKNOWN){ partialState = PARTIAL_STATE.PARTIAL; } getLogger().info("[beginWriteCommands]{}, {}", this, progress); slaveState = SLAVE_STATE.REDIS_REPL_ONLINE; getRedisServer().getReplicationStore().addCommandsListener(progress, this); } else { getLogger().warn("[beginWriteCommands][already writing]{}, {}", this, progress); } } catch (IOException e) { throw new RedisKeeperRuntimeException("[beginWriteCommands]" + progress + "," + this, e); } } protected void sendCommandForFullSync() { getLogger().info("[sendCommandForFullSync]{}, {}", this, progressAfterRdb); processPsyncSequentially(new AbstractExceptionLogTask() { @Override protected void doRun() throws Exception { try { beginWriteCommands(progressAfterRdb); } catch (Throwable th) { getLogger().error("[sendCommandForFullSync][failed]", th); if (DefaultRedisSlave.this.isOpen()) { getLogger().error("[sendCommandForFullSync] close slave"); DefaultRedisSlave.this.close(); } } } }); } @Override public ChannelFuture onCommand(CommandFile currentFile, long filePosition, Object cmd) { closeState.makeSureOpen(); getLogger().debug("[onCommand]{}, {}", this, cmd); Object command = cmd; if (cmd instanceof RedisOp) { command = ((RedisOp) cmd).buildRESP(); } ChannelFuture future = channel().writeAndFlush(command); future.addListener(writeExceptionListener); return future; } @Override public String info() { String info = ""; long lag = System.currentTimeMillis() - replAckTime; info = String.format( 
"ip=%s,port=%d,state=%s,offset=%d,lag=%d,remotePort=%d" , getClientIpAddress() == null ? ip() : getClientIpAddress(), getSlaveListeningPort(), slaveState != null ? slaveState.getDesc() : "null", replAckOff, lag/1000, remotePort()); return info; } @Override public String ip() { return redisClient.ip(); } @Override public PARTIAL_STATE partialState() { return partialState; } @Override public void partialSync() { partialState = PARTIAL_STATE.PARTIAL; } @Override public void processPsyncSequentially(Runnable runnable) { closeState.makeSureNotClosed(); psyncExecutor.execute(runnable); } @Override public void markPsyncProcessed() { getLogger().info("[markPsyncProcessed]{}", this); psyncProcessed.set(true); } @Override public String metaInfo() { return String.format("%s(%s:%d)", roleDesc(), ip(), getSlaveListeningPort()); } @Override public boolean supportProgress(Class<? extends ReplicationProgress<?>> clazz) { return clazz.equals(OffsetReplicationProgress.class); } private int remotePort() { Channel channel = channel(); return channel == null? 
0: ((InetSocketAddress)channel.remoteAddress()).getPort(); } @Override public boolean isOpen() { return closeState.isOpen(); } @Override public void close() { close(0); } @VisibleForTesting /** * testSleepMilli is for test */ protected void close(int testSleepMilli) { getLogger().info("[close]{}", this); if(closeState.isClosed()){ getLogger().info("[close][already closed]{}", this); return; } closeState.setClosing(); psyncProcessed.addListener(new AbstractExceptionLogTask() { @Override protected void doRun() throws Exception { doRealClose(); } }, MoreExecutors.directExecutor()); synchronized (closeState) { if (closeState.isClosing()) { //for unit test if (testSleepMilli > 0) { try { TimeUnit.MILLISECONDS.sleep(testSleepMilli); } catch (InterruptedException e) { } } scheduled.schedule(new AbstractExceptionLogTask() { @Override protected void doRun() throws Exception { getLogger().info("[wait for psync processed timeout close slave]{}", DefaultRedisSlave.this); doRealClose(); } }, waitForPsyncProcessedTimeoutMilli, TimeUnit.MILLISECONDS); } } } protected void doRealClose() throws IOException { synchronized (closeState) { getLogger().info("[doRealClose]{}", this); closeState.setClosed(); redisClient.close(); psyncExecutor.shutdownNow(); scheduled.shutdownNow(); } } @Override public void beforeCommand() { } // delegate methods start public void addObserver(Observer observer) { redisClient.addObserver(observer); } public void removeObserver(Observer observer) { redisClient.removeObserver(observer); } public RedisSlave becomeSlave() { return redisClient.becomeSlave(); } @Override public RedisSlave becomeXSlave() { return redisClient.becomeSlave(); } public RedisKeeperServer getRedisServer() { return redisClient.getRedisServer(); } public void setSlaveListeningPort(int port) { redisClient.setSlaveListeningPort(port); } public int getSlaveListeningPort() { return redisClient.getSlaveListeningPort(); } @Override public void setClientIpAddress(String host) { 
redisClient.setClientIpAddress(host); } @Override public String getClientIpAddress() { return redisClient.getClientIpAddress(); } public void capa(CAPA capa) { redisClient.capa(capa); } @Override public Set<CAPA> getCapas() { return redisClient.getCapas(); } public String[] readCommands(ByteBuf byteBuf) { return redisClient.readCommands(byteBuf); } public Channel channel() { return redisClient.channel(); } public void sendMessage(ByteBuf byteBuf) { closeState.makeSureNotClosed(); redisClient.sendMessage(byteBuf); } public void sendMessage(byte[] bytes) { closeState.makeSureNotClosed(); redisClient.sendMessage(bytes); } public void addChannelCloseReleaseResources(Releasable releasable) { redisClient.addChannelCloseReleaseResources(releasable); } protected Logger getLogger() { return logger; } @Override public void setClientEndpoint(Endpoint endpoint) { redisClient.setClientEndpoint(endpoint); } @Override public Endpoint getClientEndpoint() { return redisClient.getClientEndpoint(); } @Override public String toString() { return this.redisClient.toString(); } @Override public void release() throws Exception { getLogger().info("[release]{}", this); close(); } @Override public boolean capaOf(CAPA capa) { return redisClient.capaOf(capa); } @Override public boolean isKeeper() { return redisClient.isKeeper(); } @Override public void setKeeper() { redisClient.setKeeper(); } @VisibleForTesting protected void setRdbDumpMaxWaitMilli(int rdbDumpMaxWaitMilli) { this.rdbDumpMaxWaitMilli = rdbDumpMaxWaitMilli; } @VisibleForTesting protected void setWaitForPsyncProcessedTimeoutMilli(int waitForPsyncProcessedTimeoutMilli) { this.waitForPsyncProcessedTimeoutMilli = waitForPsyncProcessedTimeoutMilli; } @VisibleForTesting protected CloseState getCloseState() { return closeState; } }
add xsync filter: filter publish command which channel not start with 'xpipe-hetero-'
redis/redis-keeper/src/main/java/com/ctrip/xpipe/redis/keeper/impl/DefaultRedisSlave.java
add xsync filter: filter publish command which channel not start with 'xpipe-hetero-'
Java
apache-2.0
92ebabe7f5eb1536ef9b7ed144d30741e3953a2e
0
contentful/vault,contentful/vault
/* * Copyright (C) 2018 Contentful GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.contentful.vault; import com.contentful.java.cda.CDAClient; import static com.contentful.vault.build.GeneratedBuildParameters.PROJECT_VERSION; public final class SyncConfig { private final CDAClient client; private final boolean invalidate; SyncConfig(Builder builder) { this.invalidate = builder.invalidate; if (builder.client == null) { if (builder.accessToken == null) { throw new IllegalStateException("Cannot create a CDA client with no access token. " + "Please set it."); } if (builder.spaceId == null) { throw new IllegalStateException("Cannot create a CDA client with no space id. " + "Please set it."); } this.client = CDAClient .builder() .setToken(builder.accessToken) .setSpace(builder.spaceId) .setEnvironment(builder.environment) .setIntegration("Vault", PROJECT_VERSION) .build(); } else { this.client = builder.client; } } public CDAClient client() { return client; } public boolean shouldInvalidate() { return invalidate; } public static Builder builder() { return new Builder(); } public static class Builder { CDAClient client; boolean invalidate; String accessToken; String spaceId; String environment; public Builder setAccessToken(String accessToken) { if (client != null) { throw new IllegalStateException( "Do not set an access token, when a client is already set. Use either space id and " + "a token or a previously created client." 
); } this.accessToken = accessToken; return this; } public Builder setSpaceId(String spaceId) { if (client != null) { throw new IllegalStateException( "Do not set a space id, when a client is already set. Use either space id and " + "a token or a previously created client." ); } this.spaceId = spaceId; return this; } public Builder setEnvironment(String environment) { if (client != null) { throw new IllegalStateException( "Do not set an environment, when a client is already set. Use either environment " + "or a previously created client." ); } this.environment = environment; return this; } public Builder setClient(CDAClient client) { if (accessToken != null) { throw new IllegalStateException( "Do not set a client, when an access token is already set. Use either space id and " + "a token or a previously created client." ); } if (spaceId != null) { throw new IllegalStateException( "Do not set a client, when a space id is already set. Use either space id and " + "a token or a previously created client." ); } if (environment != null) { throw new IllegalStateException( "Do not set a client, when an environment is already set. Use either environment " + "or a previously created client." ); } this.client = client; return this; } public Builder setInvalidate(boolean invalidate) { this.invalidate = invalidate; return this; } public SyncConfig build() { return new SyncConfig(this); } } }
core/src/main/java/com/contentful/vault/SyncConfig.java
/* * Copyright (C) 2018 Contentful GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.contentful.vault; import com.contentful.java.cda.CDAClient; import static com.contentful.vault.build.GeneratedBuildParameters.PROJECT_VERSION; public final class SyncConfig { private final CDAClient client; private final boolean invalidate; SyncConfig(Builder builder) { this.invalidate = builder.invalidate; if (builder.client == null) { if (builder.accessToken == null) { throw new IllegalStateException("Cannot create a CDA client with no access token. " + "Please set it."); } if (builder.spaceId == null) { throw new IllegalStateException("Cannot create a CDA client with no space id. " + "Please set it."); } this.client = CDAClient .builder() .setToken(builder.accessToken) .setSpace(builder.spaceId) .setEnvironment(builder.environment) .setIntegration("Vault", PROJECT_VERSION) .build(); } else { this.client = builder.client; } } public CDAClient client() { return client; } public boolean shouldInvalidate() { return invalidate; } public static Builder builder() { return new Builder(); } public static class Builder { CDAClient client; boolean invalidate; String accessToken; String spaceId; String environment; public Builder setAccessToken(String accessToken) { if (client != null) { throw new IllegalStateException( "Do not set an access token, when a client is already set. Use either space id and " + "a token or a previously created client." 
); } this.accessToken = accessToken; return this; } public Builder setSpaceId(String spaceId) { if (client != null) { throw new IllegalStateException( "Do not set a space id, when a client is already set. Use either space id and " + "a token or a previously created client." ); } this.spaceId = spaceId; return this; } public Builder setEnvironment(String environment) { if (client != null) { throw new IllegalStateException( "Do not set an environment, when a client is already set. User either environment " + "or a previously created client." ); } this.environment = environment; return this; } public Builder setClient(CDAClient client) { if (accessToken != null) { throw new IllegalStateException( "Do not set a client, when an access token is already set. Use either space id and " + "a token or a previously created client." ); } if (spaceId != null) { throw new IllegalStateException( "Do not set a client, when a space id is already set. Use either space id and " + "a token or a previously created client." ); } if (environment != null) { throw new IllegalStateException( "Do not set a client, when an environment is already set. User either environment " + "or a previously created client." ); } this.client = client; return this; } public Builder setInvalidate(boolean invalidate) { this.invalidate = invalidate; return this; } public SyncConfig build() { return new SyncConfig(this); } } }
Fix a typo
core/src/main/java/com/contentful/vault/SyncConfig.java
Fix a typo
Java
apache-2.0
080ad6123dabf48b1bdeeb115148009aaa2241d6
0
RavenB/lumify,TeamUDS/lumify,bings/lumify,lumifyio/lumify,TeamUDS/lumify,j-bernardo/lumify,bings/lumify,TeamUDS/lumify,Steimel/lumify,j-bernardo/lumify,bings/lumify,dvdnglnd/lumify,lumifyio/lumify,j-bernardo/lumify,RavenB/lumify,bings/lumify,RavenB/lumify,dvdnglnd/lumify,TeamUDS/lumify,Steimel/lumify,j-bernardo/lumify,lumifyio/lumify,lumifyio/lumify,TeamUDS/lumify,j-bernardo/lumify,Steimel/lumify,lumifyio/lumify,RavenB/lumify,RavenB/lumify,dvdnglnd/lumify,dvdnglnd/lumify,dvdnglnd/lumify,Steimel/lumify,Steimel/lumify,bings/lumify
package com.altamiracorp.lumify.web.routes.entity; import com.altamiracorp.lumify.core.user.User; import com.altamiracorp.lumify.core.util.LumifyLogger; import com.altamiracorp.lumify.core.util.LumifyLoggerFactory; import com.altamiracorp.lumify.web.BaseRequestHandler; import com.altamiracorp.miniweb.HandlerChain; import com.altamiracorp.securegraph.*; import com.google.inject.Inject; import org.json.JSONArray; import org.json.JSONObject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.*; import static com.altamiracorp.lumify.core.util.CollectionUtil.toList; public class EntityRelationships extends BaseRequestHandler { private static final LumifyLogger LOGGER = LumifyLoggerFactory.getLogger(EntityRelationships.class); private final Graph graph; @Inject public EntityRelationships(final Graph graph) { this.graph = graph; } @Override public void handle(HttpServletRequest request, HttpServletResponse response, HandlerChain chain) throws Exception { User user = getUser(request); long startTime = System.nanoTime(); String[] ids = request.getParameterValues("ids[]"); if (ids == null) { ids = new String[0]; } List<Object> allIds = new ArrayList<Object>(); for (int i = 0; i < ids.length; i++) { allIds.add(ids[i]); } JSONArray resultsJson = new JSONArray(); Collection<Edge> edges = getAllEdges(allIds, user.getAuthorizations()); for (Edge edge : edges) { JSONObject rel = new JSONObject(); rel.put("from", edge.getVertexId(Direction.OUT)); rel.put("to", edge.getVertexId(Direction.IN)); rel.put("relationshipType", edge.getLabel()); rel.put("id", edge.getId()); resultsJson.put(rel); } long endTime = System.nanoTime(); LOGGER.debug("Retrieved %d in %dms", edges.size(), (endTime - startTime) / 1000 / 1000); respondWithJson(response, resultsJson); } private Collection<Edge> getAllEdges(List<Object> allVertexIds, Authorizations authorizations) { Set<Edge> results = new HashSet<Edge>(); List<Vertex> vertices = 
toList(graph.getVertices(allVertexIds, authorizations)); // since we are checking bi-directional edges we should only have to check v1->v2 and not v2->v1 Map<String, String> checkedCombinations = new HashMap<String, String>(); for (Vertex sourceVertex : vertices) { for (Vertex destVertex : vertices) { if (checkedCombinations.containsKey(sourceVertex.getId().toString() + destVertex.getId().toString())) { continue; } Iterable<Edge> edges = sourceVertex.getEdges(destVertex, Direction.BOTH, authorizations); for (Edge edge : edges) { results.add(edge); } checkedCombinations.put(sourceVertex.getId().toString() + destVertex.getId().toString(), ""); checkedCombinations.put(destVertex.getId().toString() + sourceVertex.getId().toString(), ""); } } return results; } }
lumify-web/src/main/java/com/altamiracorp/lumify/web/routes/entity/EntityRelationships.java
package com.altamiracorp.lumify.web.routes.entity; import com.altamiracorp.lumify.core.user.User; import com.altamiracorp.lumify.core.util.LumifyLogger; import com.altamiracorp.lumify.core.util.LumifyLoggerFactory; import com.altamiracorp.lumify.web.BaseRequestHandler; import com.altamiracorp.miniweb.HandlerChain; import com.altamiracorp.securegraph.*; import com.google.inject.Inject; import org.json.JSONArray; import org.json.JSONObject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.*; import static com.altamiracorp.lumify.core.util.CollectionUtil.toList; public class EntityRelationships extends BaseRequestHandler { private static final LumifyLogger LOGGER = LumifyLoggerFactory.getLogger(EntityRelationships.class); private final Graph graph; @Inject public EntityRelationships(final Graph graph) { this.graph = graph; } @Override public void handle(HttpServletRequest request, HttpServletResponse response, HandlerChain chain) throws Exception { User user = getUser(request); long startTime = System.nanoTime(); String[] ids = request.getParameterValues("ids[]"); if (ids == null) { ids = new String[0]; } List<Object> allIds = new ArrayList<Object>(); for (int i = 0; i < ids.length; i++) { allIds.add(ids[i]); } JSONArray resultsJson = new JSONArray(); Collection<Edge> edges = getAllEdges(allIds, user.getAuthorizations()); for (Edge edge : edges) { JSONObject rel = new JSONObject(); rel.put("from", edge.getVertexId(Direction.OUT)); rel.put("to", edge.getVertexId(Direction.IN)); rel.put("relationshipType", edge.getLabel()); rel.put("id", edge.getId()); resultsJson.put(rel); } long endTime = System.nanoTime(); LOGGER.debug("Retrieved %d in %dms", edges.size(), (endTime - startTime) / 1000 / 1000); respondWithJson(response, resultsJson); } private Collection<Edge> getAllEdges(List<Object> allVertexIds, Authorizations authorizations) { Set<Edge> results = new HashSet<Edge>(); List<Vertex> vertices = 
toList(graph.getVertices(allVertexIds, authorizations)); for (Vertex sourceVertex : vertices) { for (Vertex destVertex : vertices) { Iterable<Edge> edges = sourceVertex.getEdges(destVertex, Direction.BOTH, authorizations); for (Edge edge : edges) { results.add(edge); } } } return results; } }
don't check both directions
lumify-web/src/main/java/com/altamiracorp/lumify/web/routes/entity/EntityRelationships.java
don't check both directions
Java
apache-2.0
d6908518d6a0d1e79db87a7d83dd13e23e10d254
0
strapdata/elassandra,strapdata/elassandra,strapdata/elassandra,vroyer/elassandra,vroyer/elassandra,strapdata/elassandra,strapdata/elassandra,vroyer/elassandra
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.security.authc.pki; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPut; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.common.socket.SocketAccess; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.file.FileRealm; import org.elasticsearch.xpack.ssl.SSLClientAuth; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.TestXPackTransportClient; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManagerFactory; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.security.KeyStore; import java.security.SecureRandom; import java.util.Locale; import java.util.Map.Entry; import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; /** * Test authentication 
via PKI on both REST and Transport layers */ @ClusterScope(numClientNodes = 0, supportsDedicatedMasters = false, numDataNodes = 1) public class PkiAuthenticationTests extends SecurityIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { SSLClientAuth sslClientAuth = randomBoolean() ? SSLClientAuth.REQUIRED : SSLClientAuth.OPTIONAL; Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put(NetworkModule.HTTP_ENABLED.getKey(), true) .put("xpack.security.http.ssl.enabled", true) .put("xpack.security.http.ssl.client_authentication", sslClientAuth) .put("xpack.security.authc.realms.file.type", FileRealm.TYPE) .put("xpack.security.authc.realms.file.order", "0") .put("xpack.security.authc.realms.pki1.type", PkiRealm.TYPE) .put("xpack.security.authc.realms.pki1.order", "1") .put("xpack.security.authc.realms.pki1.truststore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); SecuritySettingsSource.addSecureSettings(builder, secureSettings -> secureSettings.setString("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only")); return builder.build(); } @Override protected boolean transportSSLEnabled() { return true; } public void testTransportClientCanAuthenticateViaPki() { Settings.Builder builder = Settings.builder(); addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); try (TransportClient client = createTransportClient(builder.build())) { client.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); IndexResponse response = client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); } } /** * Test uses the testclient cert which is 
trusted by the SSL layer BUT it is not trusted by the PKI authentication * realm */ public void testTransportClientAuthenticationFailure() { try (TransportClient client = createTransportClient(Settings.EMPTY)) { client.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); fail("transport client should not have been able to authenticate"); } catch (NoNodeAvailableException e) { assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#")); } } public void testRestAuthenticationViaPki() throws Exception { SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { String body = EntityUtils.toString(response.getEntity()); assertThat(body, containsString("\"acknowledged\":true")); } } } @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3382") public void testRestAuthenticationFailure() throws Exception { SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { assertThat(response.getStatusLine().getStatusCode(), is(401)); String body = EntityUtils.toString(response.getEntity()); assertThat(body, containsString("unable to authenticate user [Elasticsearch Test Client]")); } } } private SSLContext getRestSSLContext(String keystoreResourcePath, String password) throws Exception { 
SSLContext context = SSLContext.getInstance("TLS"); KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); Path store = getDataPath(keystoreResourcePath); KeyStore ks; try (InputStream in = Files.newInputStream(store)) { ks = KeyStore.getInstance("jks"); ks.load(in, password.toCharArray()); } kmf.init(ks, password.toCharArray()); TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init(ks); context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); return context; } private TransportClient createTransportClient(Settings additionalSettings) { Settings clientSettings = transportClientSettings(); if (additionalSettings.getByPrefix("xpack.ssl.").isEmpty() == false) { clientSettings = clientSettings.filter(k -> k.startsWith("xpack.ssl.") == false); } Settings.Builder builder = Settings.builder().put(clientSettings, false) .put(additionalSettings) .put("cluster.name", internalCluster().getClusterName()); builder.remove(Security.USER_SETTING.getKey()); builder.remove("request.headers.Authorization"); return new TestXPackTransportClient(builder.build()); } private String getNodeUrl() { TransportAddress transportAddress = randomFrom(internalCluster().getInstance(HttpServerTransport.class) .boundAddress().boundAddresses()); return String.format(Locale.ROOT, "https://localhost:%s/", transportAddress.address().getPort()); } }
plugin/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.security.authc.pki; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPut; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.common.socket.SocketAccess; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.file.FileRealm; import org.elasticsearch.xpack.ssl.SSLClientAuth; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.TestXPackTransportClient; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManagerFactory; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.security.KeyStore; import java.security.SecureRandom; import java.util.Locale; import java.util.Map.Entry; import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; /** * Test authentication 
via PKI on both REST and Transport layers */ @ClusterScope(numClientNodes = 0, supportsDedicatedMasters = false, numDataNodes = 1) public class PkiAuthenticationTests extends SecurityIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { SSLClientAuth sslClientAuth = randomBoolean() ? SSLClientAuth.REQUIRED : SSLClientAuth.OPTIONAL; Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put(NetworkModule.HTTP_ENABLED.getKey(), true) .put("xpack.security.http.ssl.enabled", true) .put("xpack.security.http.ssl.client_authentication", sslClientAuth) .put("xpack.security.authc.realms.file.type", FileRealm.TYPE) .put("xpack.security.authc.realms.file.order", "0") .put("xpack.security.authc.realms.pki1.type", PkiRealm.TYPE) .put("xpack.security.authc.realms.pki1.order", "1") .put("xpack.security.authc.realms.pki1.truststore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); SecuritySettingsSource.addSecureSettings(builder, secureSettings -> secureSettings.setString("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only")); return builder.build(); } @Override protected boolean transportSSLEnabled() { return true; } public void testTransportClientCanAuthenticateViaPki() { Settings.Builder builder = Settings.builder(); addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); try (TransportClient client = createTransportClient(builder.build())) { client.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); IndexResponse response = client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); } } /** * Test uses the testclient cert which is 
trusted by the SSL layer BUT it is not trusted by the PKI authentication * realm */ public void testTransportClientAuthenticationFailure() { try (TransportClient client = createTransportClient(Settings.EMPTY)) { client.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); fail("transport client should not have been able to authenticate"); } catch (NoNodeAvailableException e) { assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#")); } } public void testRestAuthenticationViaPki() throws Exception { SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { String body = EntityUtils.toString(response.getEntity()); assertThat(body, containsString("\"acknowledged\":true")); } } } public void testRestAuthenticationFailure() throws Exception { SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { assertThat(response.getStatusLine().getStatusCode(), is(401)); String body = EntityUtils.toString(response.getEntity()); assertThat(body, containsString("unable to authenticate user [Elasticsearch Test Client]")); } } } private SSLContext getRestSSLContext(String keystoreResourcePath, String password) throws Exception { SSLContext context = SSLContext.getInstance("TLS"); KeyManagerFactory kmf = 
KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); Path store = getDataPath(keystoreResourcePath); KeyStore ks; try (InputStream in = Files.newInputStream(store)) { ks = KeyStore.getInstance("jks"); ks.load(in, password.toCharArray()); } kmf.init(ks, password.toCharArray()); TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init(ks); context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); return context; } private TransportClient createTransportClient(Settings additionalSettings) { Settings clientSettings = transportClientSettings(); if (additionalSettings.getByPrefix("xpack.ssl.").isEmpty() == false) { clientSettings = clientSettings.filter(k -> k.startsWith("xpack.ssl.") == false); } Settings.Builder builder = Settings.builder().put(clientSettings, false) .put(additionalSettings) .put("cluster.name", internalCluster().getClusterName()); builder.remove(Security.USER_SETTING.getKey()); builder.remove("request.headers.Authorization"); return new TestXPackTransportClient(builder.build()); } private String getNodeUrl() { TransportAddress transportAddress = randomFrom(internalCluster().getInstance(HttpServerTransport.class) .boundAddress().boundAddresses()); return String.format(Locale.ROOT, "https://localhost:%s/", transportAddress.address().getPort()); } }
Mute PKI REST authentication failure test This test is failing for days, possibly due to a change in core Elasticsearch. This commit marks this test as awaits fix. Original commit: elastic/x-pack-elasticsearch@6f8fc253dbef05afae6fad6a2a523ef2123bcfca
plugin/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java
Mute PKI REST authentication failure test
Java
apache-2.0
08e66ff7ab2ef530292ff1ee1e1f4827aafee08e
0
davidmoten/rxjava-extras,davidmoten/rxjava-extras,davidmoten/rxjava-extras
package com.github.davidmoten.rx.internal.operators; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.List; import java.util.Queue; import java.util.concurrent.atomic.AtomicLong; import rx.Observable; import rx.Observable.OnSubscribe; import rx.Producer; import rx.Subscriber; import rx.exceptions.CompositeException; import rx.exceptions.MissingBackpressureException; import rx.internal.operators.BackpressureUtils; import rx.internal.operators.NotificationLite; import rx.internal.util.RxRingBuffer; import rx.internal.util.unsafe.MpscLinkedQueue; import rx.plugins.RxJavaHooks; /** * @author David Karnokd * * @param <T> * type of observable */ public final class OrderedMerge<T> implements OnSubscribe<T> { final List<Observable<T>> sources; final Comparator<? super T> comparator; final boolean delayErrors; public static <U extends Comparable<? super U>> Observable<U> create( Collection<Observable<U>> sources) { return create(sources, false); } public static <U> Observable<U> create(Collection<Observable<U>> sources, Comparator<? super U> comparator) { return create(sources, comparator, false); } public static <U extends Comparable<? super U>> Observable<U> create( Collection<Observable<U>> sources, boolean delayErrors) { return Observable.create(new OrderedMerge<U>(sources, new Comparator<U>() { @Override public int compare(U o1, U o2) { return o1.compareTo(o2); } }, delayErrors)); } public static <U> Observable<U> create(Collection<Observable<U>> sources, Comparator<? super U> comparator, boolean delayErrors) { return Observable.create(new OrderedMerge<U>(sources, comparator, delayErrors)); } private OrderedMerge(Collection<Observable<T>> sources, Comparator<? super T> comparator, boolean delayErrors) { this.sources = sources instanceof List ? (List<Observable<T>>) sources : new ArrayList<Observable<T>>(sources); this.comparator = comparator; this.delayErrors = delayErrors; } @Override public void call(Subscriber<? 
super T> child) { @SuppressWarnings("unchecked") SourceSubscriber<T>[] sources = new SourceSubscriber[this.sources.size()]; MergeProducer<T> mp = new MergeProducer<T>(sources, child, comparator, delayErrors); for (int i = 0; i < sources.length; i++) { if (child.isUnsubscribed()) { return; } SourceSubscriber<T> s = new SourceSubscriber<T>(mp); sources[i] = s; child.add(s); } mp.set(0); // release contents of the array child.setProducer(mp); int i = 0; for (Observable<? extends T> source : this.sources) { if (child.isUnsubscribed()) { return; } source.unsafeSubscribe(sources[i]); i++; } } static final class MergeProducer<T> extends AtomicLong implements Producer { /** */ private static final long serialVersionUID = -812969080497027108L; final boolean delayErrors; final Comparator<? super T> comparator; @SuppressWarnings("rawtypes") final SourceSubscriber[] sources; final Subscriber<? super T> child; final Queue<Throwable> errors; boolean emitting; boolean missed; @SuppressWarnings("rawtypes") public MergeProducer(SourceSubscriber[] sources, Subscriber<? super T> child, Comparator<? super T> comparator, boolean delayErrors) { this.sources = sources; this.delayErrors = delayErrors; this.errors = new MpscLinkedQueue<Throwable>(); this.child = child; this.comparator = comparator; } @Override public void request(long n) { BackpressureUtils.getAndAddRequest(this, n); emit(); } public void error(Throwable ex) { errors.offer(ex); emit(); } public void emit() { synchronized (this) { if (emitting) { missed = true; return; } emitting = true; } // lift into local variables, just in case @SuppressWarnings("unchecked") final SourceSubscriber<T>[] sources = this.sources; final int n = sources.length; final Subscriber<? 
super T> child = this.child; for (;;) { if (child.isUnsubscribed()) { return; } // eagerly check for errors if (!delayErrors && !errors.isEmpty()) { child.onError(errors.poll()); return; } // the current requested long r = get(); // aggregate total emissions long e = 0; // even without request, terminal events can be fired if the // state is right if (r == 0) { int doneCount = 0; // for each source for (SourceSubscriber<T> s : sources) { // if completed earlier if (s == null) { doneCount++; } else { // or just completed if (s.done && s.queue.isEmpty()) { doneCount++; } } } // if all of them are completed if (doneCount == n) { reportErrorOrComplete(child); return; } } // until there is request while (r != 0L) { if (child.isUnsubscribed()) { return; } // eagerly check for errors if (!delayErrors && !errors.isEmpty()) { child.onError(errors.poll()); return; } // indicates that every active source has at least one value boolean fullRow = true; // indicates that at least one value is available boolean hasAtLeastOne = false; // holds the smallest of the available values T minimum = null; // indicates which source's value is taken so it can be // polled/replenished int toPoll = -1; // number of completed sources int doneCount = 0; // for each source for (int i = 0; i < n; i++) { SourceSubscriber<T> s = sources[i]; // terminated and emptied sources are ignored if (s == null) { doneCount++; continue; } // read the terminal indicator first boolean d = s.done; // peek into the queue Object o = s.queue.peek(); // no value available if (o == null) { // because it terminated? 
if (d) { sources[i] = null; doneCount++; continue; } // otherwise, indicate not all queues are ready fullRow = false; break; } // if we already found a value, compare it against the // current if (hasAtLeastOne) { T v = NotificationLite.getValue(o); int c = comparator.compare(minimum, v); if (c > 0) { minimum = v; toPoll = i; } } else { // this is the first value found minimum = NotificationLite.getValue(o); hasAtLeastOne = true; toPoll = i; } } // in case all of the sources completed if (doneCount == n) { reportErrorOrComplete(child); return; } // if there was a full row of available values if (fullRow) { // given the winner if (toPoll >= 0) { SourceSubscriber<T> s = sources[toPoll]; // remove the winning value from its queue s.queue.poll(); // request replenishment s.requestMore(1); } // emit the smallest child.onNext(minimum); // decrement the available request and increment the // emit count if (r != Long.MAX_VALUE) { r--; e++; } } else { // if some sources weren't ready, just quit break; } } // if there was emission, adjust the downstream request amount if (e != 0L) { addAndGet(-e); } synchronized (this) { if (!missed) { emitting = false; return; } missed = false; } } } void reportErrorOrComplete(Subscriber<? 
super T> child) { if (delayErrors && !errors.isEmpty()) { if (errors.size() == 1) { child.onError(errors.poll()); } else { child.onError(new CompositeException(errors)); } } else { child.onCompleted(); } } } static final class SourceSubscriber<T> extends Subscriber<T> { final RxRingBuffer queue; final MergeProducer<T> parent; volatile boolean done; SourceSubscriber(MergeProducer<T> parent) { queue = RxRingBuffer.getSpscInstance(); this.parent = parent; } @Override public void onStart() { add(queue); request(RxRingBuffer.SIZE); } public void requestMore(long n) { request(n); } @Override public void onNext(T t) { if (done) { return; } try { queue.onNext(NotificationLite.next(t)); } catch (MissingBackpressureException mbe) { try { onError(mbe); } finally { unsubscribe(); } return; } catch (IllegalStateException ex) { if (!isUnsubscribed()) { try { onError(ex); } finally { unsubscribe(); } } return; } parent.emit(); } @Override public void onError(Throwable e) { if (done) { RxJavaHooks.onError(e); return; } done = true; parent.error(e); } @Override public void onCompleted() { if (done) { return; } done = true; parent.emit(); } } }
src/main/java/com/github/davidmoten/rx/internal/operators/OrderedMerge.java
package com.github.davidmoten.rx.internal.operators; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.List; import java.util.Queue; import java.util.concurrent.atomic.AtomicLong; import rx.Observable; import rx.Observable.OnSubscribe; import rx.Producer; import rx.Subscriber; import rx.exceptions.CompositeException; import rx.exceptions.MissingBackpressureException; import rx.internal.operators.BackpressureUtils; import rx.internal.operators.NotificationLite; import rx.internal.util.RxRingBuffer; import rx.internal.util.unsafe.MpscLinkedQueue; /** * @author David Karnokd * * @param <T> * type of observable */ public final class OrderedMerge<T> implements OnSubscribe<T> { final List<Observable<T>> sources; final Comparator<? super T> comparator; final boolean delayErrors; public static <U extends Comparable<? super U>> Observable<U> create( Collection<Observable<U>> sources) { return create(sources, false); } public static <U> Observable<U> create(Collection<Observable<U>> sources, Comparator<? super U> comparator) { return create(sources, comparator, false); } public static <U extends Comparable<? super U>> Observable<U> create( Collection<Observable<U>> sources, boolean delayErrors) { return Observable.create(new OrderedMerge<U>(sources, new Comparator<U>() { @Override public int compare(U o1, U o2) { return o1.compareTo(o2); } }, delayErrors)); } public static <U> Observable<U> create(Collection<Observable<U>> sources, Comparator<? super U> comparator, boolean delayErrors) { return Observable.create(new OrderedMerge<U>(sources, comparator, delayErrors)); } private OrderedMerge(Collection<Observable<T>> sources, Comparator<? super T> comparator, boolean delayErrors) { this.sources = sources instanceof List ? (List<Observable<T>>) sources : new ArrayList<Observable<T>>(sources); this.comparator = comparator; this.delayErrors = delayErrors; } @Override public void call(Subscriber<? 
super T> child) { @SuppressWarnings("unchecked") SourceSubscriber<T>[] sources = new SourceSubscriber[this.sources.size()]; MergeProducer<T> mp = new MergeProducer<T>(sources, child, comparator, delayErrors); for (int i = 0; i < sources.length; i++) { if (child.isUnsubscribed()) { return; } SourceSubscriber<T> s = new SourceSubscriber<T>(mp); sources[i] = s; child.add(s); } mp.set(0); // release contents of the array child.setProducer(mp); int i = 0; for (Observable<? extends T> source : this.sources) { if (child.isUnsubscribed()) { return; } source.unsafeSubscribe(sources[i]); i++; } } static final class MergeProducer<T> extends AtomicLong implements Producer { /** */ private static final long serialVersionUID = -812969080497027108L; final boolean delayErrors; final Comparator<? super T> comparator; @SuppressWarnings("rawtypes") final SourceSubscriber[] sources; final Subscriber<? super T> child; final Queue<Throwable> errors; boolean emitting; boolean missed; @SuppressWarnings("rawtypes") public MergeProducer(SourceSubscriber[] sources, Subscriber<? super T> child, Comparator<? super T> comparator, boolean delayErrors) { this.sources = sources; this.delayErrors = delayErrors; this.errors = new MpscLinkedQueue<Throwable>(); this.child = child; this.comparator = comparator; } @Override public void request(long n) { BackpressureUtils.getAndAddRequest(this, n); emit(); } public void error(Throwable ex) { errors.offer(ex); emit(); } public void emit() { synchronized (this) { if (emitting) { missed = true; return; } emitting = true; } // lift into local variables, just in case @SuppressWarnings("unchecked") final SourceSubscriber<T>[] sources = this.sources; final int n = sources.length; final Subscriber<? 
super T> child = this.child; for (;;) { if (child.isUnsubscribed()) { return; } // eagerly check for errors if (!delayErrors && !errors.isEmpty()) { child.onError(errors.poll()); return; } // the current requested long r = get(); // aggregate total emissions long e = 0; // even without request, terminal events can be fired if the // state is right if (r == 0) { int doneCount = 0; // for each source for (SourceSubscriber<T> s : sources) { // if completed earlier if (s == null) { doneCount++; } else { // or just completed if (s.done && s.queue.isEmpty()) { doneCount++; } } } // if all of them are completed if (doneCount == n) { reportErrorOrComplete(child); return; } } // until there is request while (r != 0L) { if (child.isUnsubscribed()) { return; } // eagerly check for errors if (!delayErrors && !errors.isEmpty()) { child.onError(errors.poll()); return; } // indicates that every active source has at least one value boolean fullRow = true; // indicates that at least one value is available boolean hasAtLeastOne = false; // holds the smallest of the available values T minimum = null; // indicates which source's value is taken so it can be // polled/replenished int toPoll = -1; // number of completed sources int doneCount = 0; // for each source for (int i = 0; i < n; i++) { SourceSubscriber<T> s = sources[i]; // terminated and emptied sources are ignored if (s == null) { doneCount++; continue; } // read the terminal indicator first boolean d = s.done; // peek into the queue Object o = s.queue.peek(); // no value available if (o == null) { // because it terminated? 
if (d) { sources[i] = null; doneCount++; continue; } // otherwise, indicate not all queues are ready fullRow = false; break; } // if we already found a value, compare it against the // current if (hasAtLeastOne) { T v = NotificationLite.getValue(o); int c = comparator.compare(minimum, v); if (c > 0) { minimum = v; toPoll = i; } } else { // this is the first value found minimum = NotificationLite.getValue(o); hasAtLeastOne = true; toPoll = i; } } // in case all of the sources completed if (doneCount == n) { reportErrorOrComplete(child); return; } // if there was a full row of available values if (fullRow) { // given the winner if (toPoll >= 0) { SourceSubscriber<T> s = sources[toPoll]; // remove the winning value from its queue s.queue.poll(); // request replenishment s.requestMore(1); } // emit the smallest child.onNext(minimum); // decrement the available request and increment the // emit count if (r != Long.MAX_VALUE) { r--; e++; } } else { // if some sources weren't ready, just quit break; } } // if there was emission, adjust the downstream request amount if (e != 0L) { addAndGet(-e); } synchronized (this) { if (!missed) { emitting = false; return; } missed = false; } } } void reportErrorOrComplete(Subscriber<? 
super T> child) { if (delayErrors && !errors.isEmpty()) { if (errors.size() == 1) { child.onError(errors.poll()); } else { child.onError(new CompositeException(errors)); } } else { child.onCompleted(); } } } static final class SourceSubscriber<T> extends Subscriber<T> { final RxRingBuffer queue; final MergeProducer<T> parent; volatile boolean done; public SourceSubscriber(MergeProducer<T> parent) { queue = RxRingBuffer.getSpscInstance(); this.parent = parent; } @Override public void onStart() { add(queue); request(RxRingBuffer.SIZE); } public void requestMore(long n) { request(n); } @Override public void onNext(T t) { try { queue.onNext(NotificationLite.next(t)); } catch (MissingBackpressureException mbe) { try { onError(mbe); } finally { unsubscribe(); } return; } catch (IllegalStateException ex) { if (!isUnsubscribed()) { try { onError(ex); } finally { unsubscribe(); } } return; } parent.emit(); } @Override public void onError(Throwable e) { done = true; parent.error(e); } @Override public void onCompleted() { done = true; parent.emit(); } } }
add protection to OrderedMerge.SourceSubscriber against multiple terminal events #25
src/main/java/com/github/davidmoten/rx/internal/operators/OrderedMerge.java
add protection to OrderedMerge.SourceSubscriber against multiple terminal events #25
Java
apache-2.0
37ed8b8f6724cf41f8dc6a264dbd6db9fa63afa2
0
rcordovano/autopsy,wschaeferB/autopsy,rcordovano/autopsy,wschaeferB/autopsy,esaunders/autopsy,wschaeferB/autopsy,esaunders/autopsy,rcordovano/autopsy,wschaeferB/autopsy,rcordovano/autopsy,esaunders/autopsy,esaunders/autopsy,esaunders/autopsy,rcordovano/autopsy,wschaeferB/autopsy,rcordovano/autopsy
/* * Autopsy Forensic Browser * * Copyright 2020-2020 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.texttranslation.utils; import org.apache.commons.io.FilenameUtils; import org.sleuthkit.autopsy.texttranslation.NoServiceProviderException; import org.sleuthkit.autopsy.texttranslation.TextTranslationService; import org.sleuthkit.autopsy.texttranslation.TranslationException; /** * A utility to translate file names. */ public class FileNameTranslator { /** * Translates a file name using the configured machine translation service. * * @param fileName The file name. * * @return The translation of the file name. * * @throws NoServiceProviderException If machine translation is not * configured. * @throws TranslationException If there is an error doing the * translation. */ public static String translate(String fileName) throws NoServiceProviderException, TranslationException { /* * Don't attempt translation if the characters of the file name are all * ASCII chars. * * TODO (Jira-6175): This filter prevents translation of many * non-English file names composed entirely of Latin chars. 
*/ if (fileName.matches("^\\p{ASCII}+$")) { return ""; } TextTranslationService translator = TextTranslationService.getInstance(); String baseName = FilenameUtils.getBaseName(fileName); String translation = translator.translate(baseName); if (!translation.isEmpty()) { String extension = FilenameUtils.getExtension(fileName); if (!extension.isEmpty()) { String extensionDelimiter = (extension.isEmpty()) ? "" : "."; translation += extensionDelimiter + extension; } } return translation; } /** * Prevent instantiation of this utility class */ private FileNameTranslator() { } }
Core/src/org/sleuthkit/autopsy/texttranslation/utils/FileNameTranslator.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2020-2020 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.texttranslation.utils;

import java.util.regex.Pattern;

import org.apache.commons.io.FilenameUtils;
import org.sleuthkit.autopsy.texttranslation.NoServiceProviderException;
import org.sleuthkit.autopsy.texttranslation.TextTranslationService;
import org.sleuthkit.autopsy.texttranslation.TranslationException;

/**
 * A utility to translate file names.
 */
public class FileNameTranslator {

    /*
     * Matches names made up entirely of ASCII characters; such names are not
     * sent to the translator. Compiled once because translate() is typically
     * called once per file.
     */
    private static final Pattern ASCII_ONLY = Pattern.compile("^\\p{ASCII}+$");

    /**
     * Translates a file name using the configured machine translation service.
     * Only the base name is submitted for translation; the original extension,
     * if any, is re-appended to a non-empty translation.
     *
     * @param fileName The file name.
     *
     * @return The translation of the file name, or the empty string if the
     *         name is all-ASCII or the service produced no translation.
     *
     * @throws NoServiceProviderException If machine translation is not
     *                                    configured.
     * @throws TranslationException       If there is an error doing the
     *                                    translation.
     */
    public static String translate(String fileName) throws NoServiceProviderException, TranslationException {
        /*
         * Don't attempt translation if the characters of the file name are all
         * ASCII chars.
         */
        if (ASCII_ONLY.matcher(fileName).matches()) {
            return "";
        }

        TextTranslationService translator = TextTranslationService.getInstance();
        String baseName = FilenameUtils.getBaseName(fileName);
        String translation = translator.translate(baseName);
        if (!translation.isEmpty()) {
            String extension = FilenameUtils.getExtension(fileName);
            if (!extension.isEmpty()) {
                /*
                 * The enclosing check guarantees a non-empty extension, so the
                 * "." separator is unconditional (the original conditional
                 * delimiter here was dead code).
                 */
                translation += "." + extension;
            }
        }
        return translation;
    }

    /**
     * Prevent instantiation of this utility class
     */
    private FileNameTranslator() {
    }
}
6032 translated file names in results view
Core/src/org/sleuthkit/autopsy/texttranslation/utils/FileNameTranslator.java
6032 translated file names in results view
Java
apache-2.0
28f1f0b302bb67334cc6209ece6088e13f703404
0
zielu/GitToolBox,zielu/GitToolBox,zielu/GitToolBox
package zielu.gittoolbox.util; import org.jetbrains.annotations.NotNull; public final class CachedFactory { private static final Cached LOADING = new Cached<Object>() { @Override public boolean isLoading() { return true; } public boolean isEmpty() { return true; } @NotNull @Override public Object value() { throw new IllegalStateException("Loading - no value yet. Check with isEmpty() and isLoading()"); } }; private CachedFactory() { //do nothing } public static <T> Cached<T> loading() { return LOADING; } public static <T> Cached<T> loading(@NotNull T value) { return new CachedImpl<>(value, true); } public static <T> Cached<T> loaded(@NotNull T value) { return new CachedImpl<>(value, false); } private static class CachedImpl<T> implements Cached<T> { private final T value; private final boolean loading; private CachedImpl(T value, boolean loading) { this.value = value; this.loading = loading; } @Override public boolean isLoading() { return loading; } @Override public boolean isEmpty() { return value == null; } @NotNull @Override public T value() { return value; } } }
GitToolBox/src/main/java/zielu/gittoolbox/util/CachedFactory.java
package zielu.gittoolbox.util; import org.jetbrains.annotations.NotNull; public final class CachedFactory { private static final Cached LOADING = new Cached<Object>() { @Override public boolean isLoading() { return true; } public boolean isEmpty() { return true; } @NotNull @Override public Object value() { throw new IllegalStateException("Loading - no value yet. Check with isLoading()"); } }; private CachedFactory() { //do nothing } public static <T> Cached<T> loading() { return LOADING; } public static <T> Cached<T> loading(@NotNull T value) { return new CachedImpl<>(value, true); } public static <T> Cached<T> loaded(@NotNull T value) { return new CachedImpl<>(value, false); } private static class CachedImpl<T> implements Cached<T> { private final T value; private final boolean loading; private CachedImpl(T value, boolean loading) { this.value = value; this.loading = loading; } @Override public boolean isLoading() { return loading; } @Override public boolean isEmpty() { return value == null; } @NotNull @Override public T value() { return value; } } }
Improve exception message
GitToolBox/src/main/java/zielu/gittoolbox/util/CachedFactory.java
Improve exception message
Java
apache-2.0
17098d7e2274e75eca8f8f3ddba6131b3949d4b2
0
brix-cms/brix-cms,dsimko/brix-cms,dsimko/brix-cms,kbachl/brix-cms-backup,dsimko/brix-cms,kbachl/brix-cms,kbachl/brix-cms,brix-cms/brix-cms,brix-cms/brix-cms,kbachl/brix-cms-backup,kbachl/brix-cms
package brix.workspace.rmi; import java.rmi.registry.LocateRegistry; import java.rmi.registry.Registry; import java.rmi.server.RemoteStub; import java.rmi.server.UnicastRemoteObject; import org.easymock.classextension.EasyMock; import org.junit.Ignore; import org.junit.Test; import brix.workspace.Workspace; import brix.workspace.WorkspaceManager; // FIXME figure out why this is failing on teamcity //@Ignore public class WorkspaceManagerRmiTest { @Test public void test() throws Exception { WorkspaceManager remote = EasyMock.createMock(WorkspaceManager.class); Workspace workspace = EasyMock.createMock(Workspace.class); EasyMock.expect(remote.createWorkspace()).andReturn(workspace); workspace.delete(); EasyMock.replay(remote, workspace); Registry registry = LocateRegistry.createRegistry(10000); ServerWorkspaceManager server = new ServerWorkspaceManager(remote); RemoteStub stub = UnicastRemoteObject.exportObject(server); registry.rebind("wm", stub); RemoteWorkspaceManager client = (RemoteWorkspaceManager)registry.lookup("wm"); WorkspaceManager local = new ClientWorkspaceManager(client); Workspace w = local.createWorkspace(); w.delete(); EasyMock.verify(remote, workspace); UnicastRemoteObject.unexportObject(server, true); } }
brix-workspace/src/test/java/brix/workspace/rmi/WorkspaceManagerRmiTest.java
package brix.workspace.rmi;

import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.rmi.server.RemoteStub;
import java.rmi.server.UnicastRemoteObject;

import org.easymock.classextension.EasyMock;
import org.junit.Test;

import brix.workspace.Workspace;
import brix.workspace.WorkspaceManager;

public class WorkspaceManagerRmiTest {

    /**
     * Round-trips WorkspaceManager calls through RMI: exports a server-side
     * wrapper around a mock manager, resolves it from a local registry, and
     * verifies that createWorkspace()/delete() reach the mocks.
     */
    @Test
    public void test() throws Exception {
        // Record expectations on the mocks.
        WorkspaceManager managerMock = EasyMock.createMock(WorkspaceManager.class);
        Workspace workspaceMock = EasyMock.createMock(Workspace.class);
        EasyMock.expect(managerMock.createWorkspace()).andReturn(workspaceMock);
        workspaceMock.delete();
        EasyMock.replay(managerMock, workspaceMock);

        // Publish the server-side manager in a private registry on port 10000.
        Registry rmiRegistry = LocateRegistry.createRegistry(10000);
        ServerWorkspaceManager exported = new ServerWorkspaceManager(managerMock);
        RemoteStub remoteStub = UnicastRemoteObject.exportObject(exported);
        rmiRegistry.rebind("wm", remoteStub);

        // Resolve it back through the registry and drive it from the client side.
        RemoteWorkspaceManager remoteClient = (RemoteWorkspaceManager) rmiRegistry.lookup("wm");
        WorkspaceManager clientSide = new ClientWorkspaceManager(remoteClient);
        Workspace created = clientSide.createWorkspace();
        created.delete();

        EasyMock.verify(managerMock, workspaceMock);
        UnicastRemoteObject.unexportObject(exported, true);
    }
}
looks like the rmi test is failing because the stubs do not get copied from src into classes, trying to tweak maven
brix-workspace/src/test/java/brix/workspace/rmi/WorkspaceManagerRmiTest.java
looks like the rmi test is failing because the stubs do not get copied from src into classes, trying to tweak maven
Java
apache-2.0
ebcc68678ead62d08d12b06506765d7cd772b549
0
venicegeo/pz-gateway,venicegeo/pz-gateway,venicegeo/pz-gateway
/** * Copyright 2016, RadiantBlue Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package gateway.controller; import gateway.auth.AuthConnector; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import messaging.job.JobMessageFactory; import messaging.job.KafkaClientFactory; import model.data.FileRepresentation; import model.data.location.FileLocation; import model.data.location.S3FileStore; import model.job.PiazzaJobType; import model.job.type.GetJob; import model.job.type.GetResource; import model.job.type.IngestJob; import model.job.type.SearchQueryJob; import model.job.type.SearchMetadataIngestJob; import model.request.FileRequest; import model.request.PiazzaJobRequest; import model.response.ErrorResponse; import model.response.PiazzaResponse; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import 
org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; import org.springframework.web.multipart.MultipartFile; import util.PiazzaLogger; import util.UUIDFactory; import com.amazonaws.AmazonServiceException; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.model.ObjectMetadata; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; /** * Controller that handles the incoming POST requests to the Gateway service. * * @author Patrick.Doody * */ @RestController public class GatewayController { @Autowired private PiazzaLogger logger; @Autowired private UUIDFactory uuidFactory; private Producer<String, String> producer; private RestTemplate restTemplate = new RestTemplate(); private AmazonS3 s3Client; @Value("${vcap.services.pz-kafka.credentials.host}") private String KAFKA_ADDRESS; @Value("${kafka.group}") private String KAFKA_GROUP; @Value("${dispatcher.host}") private String DISPATCHER_HOST; @Value("${dispatcher.port}") private String DISPATCHER_PORT; @Value("${dispatcher.protocol}") private String DISPATCHER_PROTOCOL; @Value("${vcap.services.pz-blobstore.credentials.bucket}") private String AMAZONS3_BUCKET_NAME; @Value("${s3.domain}") private String AMAZONS3_DOMAIN; @Value("${vcap.services.pz-blobstore.credentials.access:}") private String AMAZONS3_ACCESS_KEY; @Value("${vcap.services.pz-blobstore.credentials.private:}") private String AMAZONS3_PRIVATE_KEY; /** * Initializing the Kafka Producer on Controller startup. */ @PostConstruct public void init() { producer = KafkaClientFactory.getProducer(KAFKA_ADDRESS.split(":")[0], KAFKA_ADDRESS.split(":")[1]); // Connect to S3 Bucket. Only apply credentials if they are present. 
if ((AMAZONS3_ACCESS_KEY.isEmpty()) && (AMAZONS3_PRIVATE_KEY.isEmpty())) { s3Client = new AmazonS3Client(); } else { BasicAWSCredentials credentials = new BasicAWSCredentials(AMAZONS3_ACCESS_KEY, AMAZONS3_PRIVATE_KEY); s3Client = new AmazonS3Client(credentials); } } @PreDestroy public void cleanup() { producer.close(); } /** * Requests a file that has been prepared by the Accessor component. This is * a separate method off of the /job endpoint because the return type is * greatly different. * * @param body * The JSON Payload of the FileRequestJob. All other Job Types * will be invalid. */ @RequestMapping(value = "/file", method = RequestMethod.POST) public ResponseEntity<byte[]> accessFile(@RequestParam(required = true) String body) throws Exception { try { // Parse the Request String FileRequest request = new ObjectMapper().readValue(body, FileRequest.class); // The Request object will contain the information needed to acquire // the file bytes. Pass this off to the Dispatcher to get the file // from the Access component. ResponseEntity<byte[]> dispatcherResponse = restTemplate.getForEntity(String.format("%s://%s:%s/file/%s", DISPATCHER_PROTOCOL, DISPATCHER_HOST, DISPATCHER_PORT, request.dataId), byte[].class); logger.log(String.format("Sent File Request Job %s to Dispatcher.", request.dataId), PiazzaLogger.INFO); // The status code of the response gets swallowed up no matter what // we do. Infer the status code that we should use based on the type // of Response the REST service responds with. return dispatcherResponse; } catch (Exception exception) { String message = String.format("Error Sending Message to Dispatcher: %s", exception.getMessage()); logger.log(message, PiazzaLogger.ERROR); throw new Exception(message); } } /** * Handles an incoming Piazza Job request by passing it along from the * external users to the internal Piazza components. * * @param json * The JSON Payload * @return Response object. 
*/ @RequestMapping(value = "/job", method = RequestMethod.POST) public ResponseEntity<PiazzaResponse> job(@RequestParam(required = true) String body, @RequestParam(required = false) final MultipartFile file) { // Deserialize the incoming JSON to Request Model objects PiazzaJobRequest request; try { request = JobMessageFactory.parseRequestJson(body); } catch (Exception exception) { logger.log(String.format("An Invalid Job Request sent to the Gateway: %s", exception.getMessage()), PiazzaLogger.WARNING); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Error Parsing Job Request: " + exception.getMessage(), "Gateway"), HttpStatus.BAD_REQUEST); } // Authenticate and Authorize the request try { AuthConnector.verifyAuth(request); } catch (SecurityException securityEx) { logger.log("Non-authorized connection to Gateway Blocked.", PiazzaLogger.WARNING); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Authentication Error", "Gateway"), HttpStatus.UNAUTHORIZED); } // Determine if this Job is processed via synchronous REST, or via Kafka // message queues. if (isSynchronousJob(request.jobType)) { if ((request.jobType instanceof SearchQueryJob) || (request.jobType instanceof SearchMetadataIngestJob)) { return performDispatcherPost(request); } else { return performDispatcherGet(request); } } else { return performDispatcherKafka(request, file); } } /** * Determines if the Job Type is synchronous or not. Synchronous Jobs are * forwarded via REST, asynchronous jobs are forwarded via Kafka. * * @param jobType * The Job Type * @return true if synchronous based on the job contents, false if not */ private boolean isSynchronousJob(PiazzaJobType jobType) { boolean isSynchronous = false; // GET Jobs are always Synchronous. TODO: Use interfaces for this, // instead of static type checks. 
if ((jobType instanceof GetJob) || (jobType instanceof GetResource) || (jobType instanceof SearchQueryJob) || (jobType instanceof SearchMetadataIngestJob)) { isSynchronous = true; } return isSynchronous; } /** * Forwards a Search Query request to the internal Dispatcher component via * POST REST. * * This method is separated out from the other Dispatcher REST method * because it uses a specific POST format, instead of GETS. * * @param request * The Job Request * @return The Response from the Dispatcher */ private ResponseEntity<PiazzaResponse> performDispatcherPost(PiazzaJobRequest request) { try { String endpointString = (request.jobType instanceof SearchMetadataIngestJob) ? "searchmetadataingest" : "search"; PiazzaResponse dispatcherResponse = restTemplate.postForObject(String.format("%s://%s:%s/%s", DISPATCHER_PROTOCOL, DISPATCHER_HOST, DISPATCHER_PORT, endpointString), request.jobType, PiazzaResponse.class); logger.log(String.format("Sent Search Job For User %s to Dispatcher REST services", request.apiKey), PiazzaLogger.INFO); // The status code of the response gets swallowed up no matter what // we do. Infer the status code that we should use based on the type // of Response the REST service responds with. HttpStatus status = dispatcherResponse instanceof ErrorResponse ? HttpStatus.INTERNAL_SERVER_ERROR : HttpStatus.OK; return new ResponseEntity<PiazzaResponse>(dispatcherResponse, status); } catch (RestClientException exception) { logger.log("Could not relay message to Dispatcher.", PiazzaLogger.ERROR); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Error Processing Request: " + exception.getMessage(), "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR); } } /** * Forwards the Job request along to internal Piazza components via * synchronous REST calls. This is for cases where the Job is meant to be * processed synchronously and the user wants a response immediately for * their request. 
* * @param request * The Job Request * @return The response object */ private ResponseEntity<PiazzaResponse> performDispatcherGet(PiazzaJobRequest request) { // REST GET request to Dispatcher to fetch the status of the Job ID. // TODO: I would like a way to normalize this. String id = null, serviceName = null; if (request.jobType instanceof GetJob) { id = ((GetJob) request.jobType).getJobId(); serviceName = "job"; } else if (request.jobType instanceof GetResource) { id = ((GetResource) request.jobType).getResourceId(); serviceName = "data"; } try { PiazzaResponse dispatcherResponse = restTemplate.getForObject(String.format("%s://%s:%s/%s/%s", DISPATCHER_PROTOCOL, DISPATCHER_HOST, DISPATCHER_PORT, serviceName, id), PiazzaResponse.class); logger.log(String.format("Sent Job %s to Dispatcher %s REST services", id, serviceName), PiazzaLogger.INFO); // The status code of the response gets swallowed up no matter what // we do. Infer the status code that we should use based on the type // of Response the REST service responds with. HttpStatus status = dispatcherResponse instanceof ErrorResponse ? HttpStatus.INTERNAL_SERVER_ERROR : HttpStatus.OK; return new ResponseEntity<PiazzaResponse>(dispatcherResponse, status); } catch (RestClientException exception) { logger.log("Could not relay message to Dispatcher.", PiazzaLogger.ERROR); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Error Processing Request: " + exception.getMessage(), "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR); } } /** * Forwards the Job request along to the internal Piazza components via * Kafka. This is meant for Jobs that will return a job ID, and are * potentially long-running, and are thus asynchronous. 
* * @param request * The Job Request * @param file * The file being uploaded * @return The response object, which will contain the Job ID */ private ResponseEntity<PiazzaResponse> performDispatcherKafka(PiazzaJobRequest request, MultipartFile file) { String jobId; try { // Create a GUID for this new Job from the UUIDGen component jobId = uuidFactory.getUUID(); } catch (RestClientException exception) { logger.log("Could not connect to UUID Service for UUID.", PiazzaLogger.ERROR); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Could not generate Job ID. Core Piazza Components were not found (UUIDGen).", "UUIDGen"), HttpStatus.SERVICE_UNAVAILABLE); } // If an Ingest job, persist the file to the Amazon S3 filesystem if (request.jobType instanceof IngestJob && file != null) { try { if (((IngestJob) request.jobType).getHost() == true) { // The content length must be specified. ObjectMetadata metadata = new ObjectMetadata(); metadata.setContentLength(file.getSize()); // Send the file. The key corresponds with the S3 file name. String fileKey = String.format("%s-%s", jobId, file.getOriginalFilename()); s3Client.putObject(AMAZONS3_BUCKET_NAME, fileKey, file.getInputStream(), metadata); // Note the S3 file path in the Ingest Job. This will be // used later to pull the file in the Ingest component. IngestJob ingestJob = (IngestJob) request.jobType; if (ingestJob.getData().getDataType() instanceof FileRepresentation) { // Attach the file to the FileLocation object FileLocation fileLocation = new S3FileStore(AMAZONS3_BUCKET_NAME, fileKey, AMAZONS3_DOMAIN); ((FileRepresentation) ingestJob.getData().getDataType()).setLocation(fileLocation); logger.log(String.format("S3 File for Job %s Persisted to %s:%s", jobId, AMAZONS3_BUCKET_NAME, fileKey), PiazzaLogger.INFO); } else { // Only FileRepresentation objects can have a file // attached to them. Otherwise, this is an invalid input // and an error needs to be thrown. 
return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "The uploaded file cannot be attached to the specified Data Type: " + ingestJob.getData().getDataType().getType(), "Gateway"), HttpStatus.BAD_REQUEST); } } else { return new ResponseEntity<PiazzaResponse>( new ErrorResponse( null, "Invalid input: Host parameter for an Ingest Job cannot be set to false if a file has been specified.", "Gateway"), HttpStatus.BAD_REQUEST); } } catch (AmazonServiceException awsServiceException) { logger.log(String.format("AWS S3 Upload Error on Job %s: %s", jobId, awsServiceException.getMessage()), PiazzaLogger.ERROR); awsServiceException.printStackTrace(); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "The file was rejected by Piazza persistent storage. Reason: " + awsServiceException.getMessage(), "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR); } catch (Exception exception) { logger.log(String.format("Error Processing S3 Upload on Job %s: %s", jobId, exception.getMessage()), PiazzaLogger.ERROR); exception.printStackTrace(); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "An Internal error was encountered while persisting the file: " + exception.getMessage(), "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR); } } // Create the Kafka Message for an incoming Job to be created. 
final ProducerRecord<String, String> message; try { message = JobMessageFactory.getRequestJobMessage(request, jobId); } catch (JsonProcessingException exception) { exception.printStackTrace(); logger.log(String.format("Error Creating Kafka Message for Job %s", jobId), PiazzaLogger.ERROR); return new ResponseEntity<PiazzaResponse>(new ErrorResponse(jobId, "Error Creating Message for Job", "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR); } // Fire off a Kafka Message and then wait for a ack response from the // kafka broker try { producer.send(message).get(); } catch (Exception exception) { logger.log(String.format("Timeout sending Message for Job %s through Kafka: %s", jobId, exception.getMessage()), PiazzaLogger.ERROR); return new ResponseEntity<PiazzaResponse>( new ErrorResponse( jobId, "The Gateway did not receive a response from Kafka; the request could not be forwarded along to Piazza.", "Gateway"), HttpStatus.SERVICE_UNAVAILABLE); } logger.log(String.format("Sent Job %s with Kafka Topic %s and Key %s to Dispatcher.", jobId, message.topic(), message.key()), PiazzaLogger.INFO); // Respond immediately with the new Job GUID return new ResponseEntity<PiazzaResponse>(new PiazzaResponse(jobId), HttpStatus.CREATED); } /** * Returns administrative statistics for this Gateway component. * * @return Component information */ @RequestMapping(value = "/admin/stats", method = RequestMethod.GET) public ResponseEntity<Map<String, Object>> getAdminStats() { Map<String, Object> stats = new HashMap<String, Object>(); // Get S3 Metrics stats.put("S3Location", s3Client.getBucketLocation(AMAZONS3_BUCKET_NAME)); // Get Kafka Producer Metrics Iterator<?> iterator = producer.metrics().entrySet().iterator(); while (iterator.hasNext()) { Entry<?, ?> pair = (Entry<?, ?>) iterator.next(); stats.put(pair.getKey().toString(), pair.getValue().toString()); } return new ResponseEntity<Map<String, Object>>(stats, HttpStatus.OK); } /** * Health Check. Returns OK if this component is up and running. 
* */ @RequestMapping(value = "/health", method = RequestMethod.GET) public String healthCheck() { return "OK"; } }
src/main/java/gateway/controller/GatewayController.java
/** * Copyright 2016, RadiantBlue Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package gateway.controller; import gateway.auth.AuthConnector; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import messaging.job.JobMessageFactory; import messaging.job.KafkaClientFactory; import model.data.FileRepresentation; import model.data.location.FileLocation; import model.data.location.S3FileStore; import model.job.PiazzaJobType; import model.job.type.GetJob; import model.job.type.GetResource; import model.job.type.IngestJob; import model.job.type.SearchQueryJob; import model.job.type.SearchMetadataIngestJob; import model.request.FileRequest; import model.request.PiazzaJobRequest; import model.response.ErrorResponse; import model.response.PiazzaResponse; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import 
org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; import org.springframework.web.multipart.MultipartFile; import util.PiazzaLogger; import util.UUIDFactory; import com.amazonaws.AmazonServiceException; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.model.ObjectMetadata; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; /** * Controller that handles the incoming POST requests to the Gateway service. * * @author Patrick.Doody * */ @RestController public class GatewayController { @Autowired private PiazzaLogger logger; @Autowired private UUIDFactory uuidFactory; private Producer<String, String> producer; private RestTemplate restTemplate = new RestTemplate(); private AmazonS3 s3Client; @Value("${vcap.services.pz-kafka.credentials.host}") private String KAFKA_ADDRESS; @Value("${kafka.group}") private String KAFKA_GROUP; @Value("${dispatcher.host}") private String DISPATCHER_HOST; @Value("${dispatcher.port}") private String DISPATCHER_PORT; @Value("${dispatcher.protocol}") private String DISPATCHER_PROTOCOL; @Value("${vcap.services.pz-blobstore.credentials.bucket}") private String AMAZONS3_BUCKET_NAME; @Value("${s3.domain}") private String AMAZONS3_DOMAIN; @Value("${vcap.services.pz-blobstore.credentials.access:}") private String AMAZONS3_ACCESS_KEY; @Value("${vcap.services.pz-blobstore.credentials.private:}") private String AMAZONS3_PRIVATE_KEY; /** * Initializing the Kafka Producer on Controller startup. */ @PostConstruct public void init() { producer = KafkaClientFactory.getProducer(KAFKA_ADDRESS.split(":")[0], KAFKA_ADDRESS.split(":")[1]); // Connect to S3 Bucket. Only apply credentials if they are present. 
if ((AMAZONS3_ACCESS_KEY.isEmpty()) && (AMAZONS3_PRIVATE_KEY.isEmpty())) { s3Client = new AmazonS3Client(); } else { BasicAWSCredentials credentials = new BasicAWSCredentials(AMAZONS3_ACCESS_KEY, AMAZONS3_PRIVATE_KEY); s3Client = new AmazonS3Client(credentials); } } @PreDestroy public void cleanup() { producer.close(); } /** * Requests a file that has been prepared by the Accessor component. This is * a separate method off of the /job endpoint because the return type is * greatly different. * * @param body * The JSON Payload of the FileRequestJob. All other Job Types * will be invalid. */ @RequestMapping(value = "/file", method = RequestMethod.POST) public ResponseEntity<byte[]> accessFile(@RequestParam(required = true) String body) throws Exception { try { // Parse the Request String FileRequest request = new ObjectMapper().readValue(body, FileRequest.class); // The Request object will contain the information needed to acquire // the file bytes. Pass this off to the Dispatcher to get the file // from the Access component. ResponseEntity<byte[]> dispatcherResponse = restTemplate.getForEntity(String.format("%s://%s:%s/file/%s", DISPATCHER_PROTOCOL, DISPATCHER_HOST, DISPATCHER_PORT, request.dataId), byte[].class); logger.log(String.format("Sent File Request Job %s to Dispatcher.", request.dataId), PiazzaLogger.INFO); // The status code of the response gets swallowed up no matter what // we do. Infer the status code that we should use based on the type // of Response the REST service responds with. return dispatcherResponse; } catch (Exception exception) { String message = String.format("Error Sending Message to Dispatcher: %s", exception.getMessage()); logger.log(message, PiazzaLogger.ERROR); throw new Exception(message); } } /** * Handles an incoming Piazza Job request by passing it along from the * external users to the internal Piazza components. * * @param json * The JSON Payload * @return Response object. 
 */
@RequestMapping(value = "/job", method = RequestMethod.POST)
public ResponseEntity<PiazzaResponse> job(@RequestParam(required = true) String body,
		@RequestParam(required = false) final MultipartFile file) {
	// Deserialize the incoming JSON to Request Model objects
	PiazzaJobRequest request;
	try {
		request = JobMessageFactory.parseRequestJson(body);
	} catch (Exception exception) {
		logger.log(String.format("An Invalid Job Request sent to the Gateway: %s", exception.getMessage()),
				PiazzaLogger.WARNING);
		return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Error Parsing Job Request: "
				+ exception.getMessage(), "Gateway"), HttpStatus.BAD_REQUEST);
	}

	// Authenticate and Authorize the request
	try {
		AuthConnector.verifyAuth(request);
	} catch (SecurityException securityEx) {
		// NOTE(review): securityEx is intentionally not surfaced to the caller;
		// only a generic "Authentication Error" leaves the Gateway.
		logger.log("Non-authorized connection to Gateway Blocked.", PiazzaLogger.WARNING);
		return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Authentication Error", "Gateway"),
				HttpStatus.UNAUTHORIZED);
	}

	// Determine if this Job is processed via synchronous REST, or via Kafka
	// message queues.
	if (isSynchronousJob(request.jobType)) {
		// Search jobs use a POST-style relay; all other synchronous jobs are GETs.
		if ((request.jobType instanceof SearchQueryJob) || (request.jobType instanceof SearchMetadataIngestJob)) {
			return performDispatcherPost(request);
		} else {
			return performDispatcherGet(request);
		}
	} else {
		return performDispatcherKafka(request, file);
	}
}

/**
 * Determines if the Job Type is synchronous or not. Synchronous Jobs are
 * forwarded via REST, asynchronous jobs are forwarded via Kafka.
 *
 * @param jobType
 *            The Job Type
 * @return true if synchronous based on the job contents, false if not
 */
private boolean isSynchronousJob(PiazzaJobType jobType) {
	boolean isSynchronous = false;
	// GET Jobs are always Synchronous. TODO: Use interfaces for this,
	// instead of static type checks.
	if ((jobType instanceof GetJob) || (jobType instanceof GetResource) || (jobType instanceof SearchQueryJob)
			|| (jobType instanceof SearchMetadataIngestJob)) {
		isSynchronous = true;
	}
	return isSynchronous;
}

/**
 * Forwards a Search Query request to the internal Dispatcher component via
 * POST REST.
 *
 * This method is separated out from the other Dispatcher REST method
 * because it uses a specific POST format, instead of GETS.
 *
 * @param request
 *            The Job Request
 * @return The Response from the Dispatcher
 */
private ResponseEntity<PiazzaResponse> performDispatcherPost(PiazzaJobRequest request) {
	try {
		// Pick the Dispatcher endpoint based on the concrete Search job type.
		String endpointString = (request.jobType instanceof SearchMetadataIngestJob) ? "searchmetadataingest"
				: "search";
		PiazzaResponse dispatcherResponse = restTemplate.postForObject(String.format("%s://%s:%s/%s",
				DISPATCHER_PROTOCOL, DISPATCHER_HOST, DISPATCHER_PORT, endpointString), request.jobType,
				PiazzaResponse.class);
		logger.log(String.format("Sent Search Job to Dispatcher REST services"), PiazzaLogger.INFO);
		// The status code of the response gets swallowed up no matter what
		// we do. Infer the status code that we should use based on the type
		// of Response the REST service responds with.
		HttpStatus status = dispatcherResponse instanceof ErrorResponse ? HttpStatus.INTERNAL_SERVER_ERROR
				: HttpStatus.OK;
		return new ResponseEntity<PiazzaResponse>(dispatcherResponse, status);
	} catch (RestClientException exception) {
		logger.log("Could not relay message to Dispatcher.", PiazzaLogger.ERROR);
		return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Error Processing Request: "
				+ exception.getMessage(), "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR);
	}
}

/**
 * Forwards the Job request along to internal Piazza components via
 * synchronous REST calls. This is for cases where the Job is meant to be
 * processed synchronously and the user wants a response immediately for
 * their request.
 *
 * @param request
 *            The Job Request
 * @return The response object
 */
private ResponseEntity<PiazzaResponse> performDispatcherGet(PiazzaJobRequest request) {
	// REST GET request to Dispatcher to fetch the status of the Job ID.
	// TODO: I would like a way to normalize this.
	String id = null, serviceName = null;
	if (request.jobType instanceof GetJob) {
		id = ((GetJob) request.jobType).getJobId();
		serviceName = "job";
	} else if (request.jobType instanceof GetResource) {
		id = ((GetResource) request.jobType).getResourceId();
		serviceName = "data";
	}
	try {
		PiazzaResponse dispatcherResponse = restTemplate.getForObject(String.format("%s://%s:%s/%s/%s",
				DISPATCHER_PROTOCOL, DISPATCHER_HOST, DISPATCHER_PORT, serviceName, id), PiazzaResponse.class);
		logger.log(String.format("Sent Job %s to Dispatcher %s REST services", id, serviceName), PiazzaLogger.INFO);
		// The status code of the response gets swallowed up no matter what
		// we do. Infer the status code that we should use based on the type
		// of Response the REST service responds with.
		HttpStatus status = dispatcherResponse instanceof ErrorResponse ? HttpStatus.INTERNAL_SERVER_ERROR
				: HttpStatus.OK;
		return new ResponseEntity<PiazzaResponse>(dispatcherResponse, status);
	} catch (RestClientException exception) {
		logger.log("Could not relay message to Dispatcher.", PiazzaLogger.ERROR);
		return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null, "Error Processing Request: "
				+ exception.getMessage(), "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR);
	}
}

/**
 * Forwards the Job request along to the internal Piazza components via
 * Kafka. This is meant for Jobs that will return a job ID, and are
 * potentially long-running, and are thus asynchronous.
 *
 * For Ingest Jobs carrying a file, the file is first persisted to S3 and
 * its location attached to the job payload before the Kafka message is sent.
 *
 * @param request
 *            The Job Request
 * @param file
 *            The file being uploaded
 * @return The response object, which will contain the Job ID
 */
private ResponseEntity<PiazzaResponse> performDispatcherKafka(PiazzaJobRequest request, MultipartFile file) {
	String jobId;
	try {
		// Create a GUID for this new Job from the UUIDGen component
		jobId = uuidFactory.getUUID();
	} catch (RestClientException exception) {
		logger.log("Could not connect to UUID Service for UUID.", PiazzaLogger.ERROR);
		return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null,
				"Could not generate Job ID. Core Piazza Components were not found (UUIDGen).", "UUIDGen"),
				HttpStatus.SERVICE_UNAVAILABLE);
	}

	// If an Ingest job, persist the file to the Amazon S3 filesystem
	if (request.jobType instanceof IngestJob && file != null) {
		try {
			// NOTE(review): "== true" on a boolean is redundant; also presumably
			// getHost() means "Piazza hosts the file" — confirm with IngestJob docs.
			if (((IngestJob) request.jobType).getHost() == true) {
				// The content length must be specified.
				ObjectMetadata metadata = new ObjectMetadata();
				metadata.setContentLength(file.getSize());
				// Send the file. The key corresponds with the S3 file name.
				String fileKey = String.format("%s-%s", jobId, file.getOriginalFilename());
				s3Client.putObject(AMAZONS3_BUCKET_NAME, fileKey, file.getInputStream(), metadata);
				// Note the S3 file path in the Ingest Job. This will be
				// used later to pull the file in the Ingest component.
				IngestJob ingestJob = (IngestJob) request.jobType;
				if (ingestJob.getData().getDataType() instanceof FileRepresentation) {
					// Attach the file to the FileLocation object
					FileLocation fileLocation = new S3FileStore(AMAZONS3_BUCKET_NAME, fileKey, AMAZONS3_DOMAIN);
					((FileRepresentation) ingestJob.getData().getDataType()).setLocation(fileLocation);
					logger.log(String.format("S3 File for Job %s Persisted to %s:%s", jobId, AMAZONS3_BUCKET_NAME,
							fileKey), PiazzaLogger.INFO);
				} else {
					// Only FileRepresentation objects can have a file
					// attached to them. Otherwise, this is an invalid input
					// and an error needs to be thrown.
					return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null,
							"The uploaded file cannot be attached to the specified Data Type: "
									+ ingestJob.getData().getDataType().getType(), "Gateway"),
							HttpStatus.BAD_REQUEST);
				}
			} else {
				return new ResponseEntity<PiazzaResponse>(
						new ErrorResponse(
								null,
								"Invalid input: Host parameter for an Ingest Job cannot be set to false if a file has been specified.",
								"Gateway"), HttpStatus.BAD_REQUEST);
			}
		} catch (AmazonServiceException awsServiceException) {
			logger.log(String.format("AWS S3 Upload Error on Job %s: %s", jobId, awsServiceException.getMessage()),
					PiazzaLogger.ERROR);
			// NOTE(review): printStackTrace duplicates the logger call above;
			// consider routing the stack trace through the logger instead.
			awsServiceException.printStackTrace();
			return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null,
					"The file was rejected by Piazza persistent storage. Reason: "
							+ awsServiceException.getMessage(), "Gateway"), HttpStatus.INTERNAL_SERVER_ERROR);
		} catch (Exception exception) {
			logger.log(String.format("Error Processing S3 Upload on Job %s: %s", jobId, exception.getMessage()),
					PiazzaLogger.ERROR);
			exception.printStackTrace();
			return new ResponseEntity<PiazzaResponse>(new ErrorResponse(null,
					"An Internal error was encountered while persisting the file: " + exception.getMessage(),
					"Gateway"), HttpStatus.INTERNAL_SERVER_ERROR);
		}
	}

	// Create the Kafka Message for an incoming Job to be created.
	final ProducerRecord<String, String> message;
	try {
		message = JobMessageFactory.getRequestJobMessage(request, jobId);
	} catch (JsonProcessingException exception) {
		exception.printStackTrace();
		logger.log(String.format("Error Creating Kafka Message for Job %s", jobId), PiazzaLogger.ERROR);
		return new ResponseEntity<PiazzaResponse>(new ErrorResponse(jobId, "Error Creating Message for Job",
				"Gateway"), HttpStatus.INTERNAL_SERVER_ERROR);
	}

	// Fire off a Kafka Message and then wait for a ack response from the
	// kafka broker
	try {
		// .get() blocks until the broker acknowledges (or the send fails)
		producer.send(message).get();
	} catch (Exception exception) {
		logger.log(String.format("Timeout sending Message for Job %s through Kafka: %s", jobId,
				exception.getMessage()), PiazzaLogger.ERROR);
		return new ResponseEntity<PiazzaResponse>(
				new ErrorResponse(
						jobId,
						"The Gateway did not receive a response from Kafka; the request could not be forwarded along to Piazza.",
						"Gateway"), HttpStatus.SERVICE_UNAVAILABLE);
	}

	logger.log(String.format("Sent Job %s with Kafka Topic %s and Key %s to Dispatcher.", jobId, message.topic(),
			message.key()), PiazzaLogger.INFO);

	// Respond immediately with the new Job GUID
	return new ResponseEntity<PiazzaResponse>(new PiazzaResponse(jobId), HttpStatus.CREATED);
}

/**
 * Returns administrative statistics for this Gateway component.
 *
 * @return Component information
 */
@RequestMapping(value = "/admin/stats", method = RequestMethod.GET)
public ResponseEntity<Map<String, Object>> getAdminStats() {
	Map<String, Object> stats = new HashMap<String, Object>();
	// Get S3 Metrics
	stats.put("S3Location", s3Client.getBucketLocation(AMAZONS3_BUCKET_NAME));
	// Get Kafka Producer Metrics
	Iterator<?> iterator = producer.metrics().entrySet().iterator();
	while (iterator.hasNext()) {
		Entry<?, ?> pair = (Entry<?, ?>) iterator.next();
		stats.put(pair.getKey().toString(), pair.getValue().toString());
	}
	return new ResponseEntity<Map<String, Object>>(stats, HttpStatus.OK);
}

/**
 * Health Check. Returns OK if this component is up and running.
 *
 */
@RequestMapping(value = "/health", method = RequestMethod.GET)
public String healthCheck() {
	return "OK";
}
}
slightly more detailed logging
src/main/java/gateway/controller/GatewayController.java
slightly more detailed logging
Java
apache-2.0
1a55629290f2ed5089f815dcefb9298ef5293cfe
0
apache/camel,tadayosi/camel,pax95/camel,nikhilvibhav/camel,pax95/camel,christophd/camel,tadayosi/camel,cunningt/camel,adessaigne/camel,cunningt/camel,christophd/camel,nikhilvibhav/camel,tadayosi/camel,adessaigne/camel,christophd/camel,cunningt/camel,adessaigne/camel,apache/camel,adessaigne/camel,tdiesler/camel,cunningt/camel,tdiesler/camel,apache/camel,apache/camel,cunningt/camel,christophd/camel,christophd/camel,apache/camel,tdiesler/camel,tadayosi/camel,tdiesler/camel,pax95/camel,cunningt/camel,pax95/camel,tdiesler/camel,christophd/camel,tdiesler/camel,pax95/camel,adessaigne/camel,tadayosi/camel,nikhilvibhav/camel,apache/camel,tadayosi/camel,pax95/camel,adessaigne/camel,nikhilvibhav/camel
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl.engine; import org.apache.camel.Consumer; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.ExtendedCamelContext; import org.apache.camel.spi.ExchangeFactory; import org.apache.camel.spi.ExchangeFactoryManager; import org.apache.camel.support.DefaultExchange; import org.apache.camel.support.PooledObjectFactorySupport; import org.apache.camel.util.URISupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * {@link ExchangeFactory} that creates a new {@link Exchange} instance. 
*/ public class PrototypeExchangeFactory extends PooledObjectFactorySupport<Exchange> implements ExchangeFactory { private static final Logger LOG = LoggerFactory.getLogger(PrototypeExchangeFactory.class); final Consumer consumer; ExchangeFactoryManager exchangeFactoryManager; String routeId; public PrototypeExchangeFactory() { this.consumer = null; } public PrototypeExchangeFactory(Consumer consumer) { this.consumer = consumer; } @Override protected void doBuild() throws Exception { super.doBuild(); this.exchangeFactoryManager = camelContext.adapt(ExtendedCamelContext.class).getExchangeFactoryManager(); // force to create and load the class during build time so the JVM does not // load the class on first exchange to be created DefaultExchange dummy = new DefaultExchange(camelContext); // force message init to load classes dummy.getIn(); dummy.getIn().getHeaders(); LOG.trace("Warming up PrototypeExchangeFactory loaded class: {}", dummy.getClass().getName()); } @Override public String getRouteId() { return routeId; } @Override public void setRouteId(String routeId) { this.routeId = routeId; } @Override public Consumer getConsumer() { return consumer; } @Override public ExchangeFactory newExchangeFactory(Consumer consumer) { PrototypeExchangeFactory answer = new PrototypeExchangeFactory(consumer); answer.setStatisticsEnabled(statisticsEnabled); answer.setCapacity(capacity); answer.setCamelContext(camelContext); return answer; } @Override public Exchange acquire() { throw new UnsupportedOperationException("Not in use"); } @Override public Exchange create(boolean autoRelease) { if (statisticsEnabled) { statistics.created.increment(); } return new DefaultExchange(camelContext); } @Override public Exchange create(Endpoint fromEndpoint, boolean autoRelease) { if (statisticsEnabled) { statistics.created.increment(); } return new DefaultExchange(fromEndpoint); } @Override public boolean release(Exchange exchange) { if (statisticsEnabled) { statistics.released.increment(); } 
return true; } @Override public boolean isPooled() { return false; } @Override protected void doStart() throws Exception { super.doStart(); if (exchangeFactoryManager != null) { exchangeFactoryManager.addExchangeFactory(this); } } @Override protected void doStop() throws Exception { super.doStop(); if (exchangeFactoryManager != null) { exchangeFactoryManager.removeExchangeFactory(this); } logUsageSummary(LOG, "PrototypeExchangeFactory", 0); } void logUsageSummary(Logger log, String name, int pooled) { if (statisticsEnabled && consumer != null) { // only log if there is any usage long created = statistics.getCreatedCounter(); long acquired = statistics.getAcquiredCounter(); long released = statistics.getReleasedCounter(); long discarded = statistics.getDiscardedCounter(); boolean shouldLog = pooled > 0 || created > 0 || acquired > 0 || released > 0 || discarded > 0; if (shouldLog) { String id = getRouteId(); if (id == null) { id = ""; } else { id = " " + id; } String uri = consumer.getEndpoint().getEndpointBaseUri(); uri = URISupport.sanitizeUri(uri); // are there any leaks? boolean leak = created + acquired > released + discarded; if (leak) { long leaks = (created + acquired) - (released + discarded); log.warn( "{}{} ({}) usage (leaks detected: {}) [pooled: {}, created: {}, acquired: {} released: {}, discarded: {}]", name, id, uri, leaks, pooled, created, acquired, released, discarded); } else { log.info("{}{} ({}) usage [pooled: {}, created: {}, acquired: {} released: {}, discarded: {}]", name, id, uri, pooled, created, acquired, released, discarded); } } } } }
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/PrototypeExchangeFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl.engine; import org.apache.camel.Consumer; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.ExtendedCamelContext; import org.apache.camel.spi.ExchangeFactory; import org.apache.camel.spi.ExchangeFactoryManager; import org.apache.camel.support.DefaultExchange; import org.apache.camel.support.PooledObjectFactorySupport; import org.apache.camel.util.URISupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * {@link ExchangeFactory} that creates a new {@link Exchange} instance. 
*/ public class PrototypeExchangeFactory extends PooledObjectFactorySupport<Exchange> implements ExchangeFactory { private static final Logger LOG = LoggerFactory.getLogger(PrototypeExchangeFactory.class); final Consumer consumer; ExchangeFactoryManager exchangeFactoryManager; String routeId; public PrototypeExchangeFactory() { this.consumer = null; } public PrototypeExchangeFactory(Consumer consumer) { this.consumer = consumer; } @Override protected void doBuild() throws Exception { super.doBuild(); this.exchangeFactoryManager = camelContext.adapt(ExtendedCamelContext.class).getExchangeFactoryManager(); // force to create and load the class during build time so the JVM does not // load the class on first exchange to be created DefaultExchange dummy = new DefaultExchange(camelContext); // force message init to load classes dummy.getIn(); dummy.getIn().getHeaders(); LOG.trace("Warming up PrototypeExchangeFactory loaded class: {}", dummy.getClass().getName()); } @Override public String getRouteId() { return routeId; } @Override public void setRouteId(String routeId) { this.routeId = routeId; } @Override public Consumer getConsumer() { return consumer; } @Override public ExchangeFactory newExchangeFactory(Consumer consumer) { PrototypeExchangeFactory answer = new PrototypeExchangeFactory(consumer); answer.setStatisticsEnabled(statisticsEnabled); answer.setCapacity(capacity); answer.setCamelContext(camelContext); return answer; } @Override public Exchange acquire() { throw new UnsupportedOperationException("Not in use"); } @Override public Exchange create(boolean autoRelease) { if (statisticsEnabled) { statistics.created.increment(); } return new DefaultExchange(camelContext); } @Override public Exchange create(Endpoint fromEndpoint, boolean autoRelease) { if (statisticsEnabled) { statistics.created.increment(); } return new DefaultExchange(fromEndpoint); } @Override public boolean release(Exchange exchange) { if (statisticsEnabled) { statistics.released.increment(); } 
return true; } @Override public boolean isPooled() { return false; } @Override protected void doStart() throws Exception { super.doStart(); if (exchangeFactoryManager != null) { exchangeFactoryManager.addExchangeFactory(this); } } @Override protected void doStop() throws Exception { super.doStop(); if (exchangeFactoryManager != null) { exchangeFactoryManager.removeExchangeFactory(this); } logUsageSummary(LOG, "PrototypeExchangeFactory", 0); } void logUsageSummary(Logger log, String name, int pooled) { if (statisticsEnabled && consumer != null) { // only log if there is any usage long created = statistics.getCreatedCounter(); long acquired = statistics.getAcquiredCounter(); long released = statistics.getReleasedCounter(); long discarded = statistics.getDiscardedCounter(); boolean shouldLog = pooled > 0 || created > 0 || acquired > 0 || released > 0 || discarded > 0; if (shouldLog) { String id = getRouteId(); String uri = consumer.getEndpoint().getEndpointBaseUri(); uri = URISupport.sanitizeUri(uri); // are there any leaks? boolean leak = created + acquired > released + discarded; if (leak) { long leaks = (created + acquired) - (released + discarded); log.warn( "{} {} ({}) usage (leaks detected: {}) [pooled: {}, created: {}, acquired: {} released: {}, discarded: {}]", name, id, uri, leaks, pooled, created, acquired, released, discarded); } else { log.info("{} {} ({}) usage [pooled: {}, created: {}, acquired: {} released: {}, discarded: {}]", name, id, uri, pooled, created, acquired, released, discarded); } } } } }
Polished
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/PrototypeExchangeFactory.java
Polished
Java
apache-2.0
b7f23108ffce1451fac45dcf6ac7ff6efa44ec56
0
thammegowda/incubator-joshua,thammegowda/incubator-joshua,fhieber/incubator-joshua,thammegowda/incubator-joshua,thammegowda/incubator-joshua,thammegowda/incubator-joshua,fhieber/incubator-joshua,fhieber/incubator-joshua,thammegowda/incubator-joshua,fhieber/incubator-joshua,fhieber/incubator-joshua,thammegowda/incubator-joshua,thammegowda/incubator-joshua,fhieber/incubator-joshua,thammegowda/incubator-joshua,fhieber/incubator-joshua,fhieber/incubator-joshua,thammegowda/incubator-joshua,fhieber/incubator-joshua
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package joshua.decoder.ff.lm;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;

import joshua.corpus.Vocabulary;
import joshua.decoder.JoshuaConfiguration;
import joshua.decoder.Support;
import joshua.decoder.chart_parser.SourcePath;
import joshua.decoder.ff.FeatureVector;
import joshua.decoder.ff.StatefulFF;
import joshua.decoder.ff.lm.berkeley_lm.LMGrammarBerkeley;
import joshua.decoder.ff.lm.KenLM;
import joshua.decoder.ff.state_maintenance.DPState;
import joshua.decoder.ff.state_maintenance.NgramDPState;
import joshua.decoder.ff.tm.Rule;
import joshua.decoder.hypergraph.HGNode;
import joshua.decoder.segment_file.Sentence;

/**
 * This class performs the following:
 * <ol>
 * <li>Gets the additional LM score due to combinations of small items into larger ones by using
 * rules
 * <li>Gets the LM state
 * <li>Gets the left-side LM state estimation score
 * </ol>
 *
 * @author Matt Post <[email protected]>
 * @author Juri Ganitkevitch <[email protected]>
 * @author Zhifei Li, <[email protected]>
 */
public class LanguageModelFF extends StatefulFF {

  // Counter used to give each LM feature instance a unique name ("lm_0", "lm_1", ...)
  private static int LM_INDEX = 0;

  // Vocabulary ids of the sentence start/stop markers, set in initializeLM()
  public static int START_SYM_ID;
  public static int STOP_SYM_ID;

  /**
   * N-gram language model. We assume the language model is in ARPA format for equivalent state:
   *
   * <ol>
   * <li>We assume it is a backoff lm, and high-order ngram implies low-order ngram; absense of
   * low-order ngram implies high-order ngram</li>
   * <li>For a ngram, existence of backoffweight => existence a probability Two ways of dealing with
   * low counts:
   * <ul>
   * <li>SRILM: don't multiply zeros in for unknown words</li>
   * <li>Pharaoh: cap at a minimum score exp(-10), including unknown words</li>
   * </ul>
   * </li>
   */
  protected NGramLanguageModel languageModel;

  /**
   * We always use this order of ngram, though the LMGrammar may provide higher order probability.
   */
  protected final int ngramOrder;

  /*
   * We cache the weight of the feature since there is only one.
   */
  protected float weight;
  protected String type;
  protected String path;

  /* Whether this is a class-based LM */
  private boolean isClassLM;
  private ClassMap classMap;

  /**
   * Maps vocabulary ids of words to the vocabulary ids of their word classes,
   * falling back to the unknown-word id for unmapped words.
   */
  protected class ClassMap {

    private final int OOV_id = Vocabulary.getUnknownId();
    private HashMap<Integer, Integer> classMap;

    public ClassMap(String file_name) throws IOException {
      this.classMap = new HashMap<Integer, Integer>();
      read(file_name);
    }

    public int getClassID(int wordID) {
      return this.classMap.getOrDefault(wordID, OOV_id);
    }

    /**
     * Reads a class map from file.
     *
     * Expected format: one "word class" pair per line, whitespace-separated;
     * malformed lines are warned about and skipped.
     *
     * @param file_name
     * @throws IOException
     */
    private void read(String file_name) throws IOException {

      int lineno = 0;
      for (String line: new joshua.util.io.LineReader(file_name, false)) {
        lineno++;
        String[] lineComp = line.trim().split("\\s+");
        try {
          this.classMap.put(Vocabulary.id(lineComp[0]), Vocabulary.id(lineComp[1]));
        } catch (java.lang.ArrayIndexOutOfBoundsException e) {
          System.err.println(String.format("* WARNING: bad vocab line #%d '%s'", lineno, line));
        }
      }
    }

  }

  public LanguageModelFF(FeatureVector weights, String[] args, JoshuaConfiguration config) {
    super(weights, String.format("lm_%d", LanguageModelFF.LM_INDEX++), args, config);

    this.type = parsedArgs.get("lm_type");
    this.ngramOrder = Integer.parseInt(parsedArgs.get("lm_order"));
    this.path = parsedArgs.get("lm_file");

    if (parsedArgs.containsKey("class_map"))
      try {
        this.isClassLM = true;
        this.classMap = new ClassMap(parsedArgs.get("class_map"));
      } catch (IOException e) {
        // TODO Auto-generated catch block
        // NOTE(review): a bad class_map file is silently swallowed here; classMap
        // stays null and getClasses() later exits the JVM. Consider failing fast.
        e.printStackTrace();
      }

    // The dense feature initialization hasn't happened yet, so we have to retrieve this as sparse
    this.weight = weights.getSparse(name);

    initializeLM();
  }

  @Override
  public ArrayList<String> reportDenseFeatures(int index) {
    denseFeatureIndex = index;

    ArrayList<String> names = new ArrayList<String>();
    names.add(name);
    return names;
  }

  /**
   * Initializes the underlying language model.
   *
   * Instantiates the KenLM or BerkeleyLM backend (exiting on an unknown
   * lm_type), registers it with the Vocabulary, and caches the vocabulary
   * ids of the start/stop sentence markers.
   *
   * @param config
   * @param type
   * @param path
   */
  public void initializeLM() {
    if (type.equals("kenlm")) {
      this.languageModel = new KenLM(ngramOrder, path);

    } else if (type.equals("berkeleylm")) {
      this.languageModel = new LMGrammarBerkeley(ngramOrder, path);

    } else {
      System.err.println(String.format("* FATAL: Invalid backend lm_type '%s' for LanguageModel", type));
      System.err.println(String.format("*        Permissible values for 'lm_type' are 'kenlm' and 'berkeleylm'"));
      System.exit(-1);
    }

    Vocabulary.registerLanguageModel(this.languageModel);
    Vocabulary.id(config.default_non_terminal);

    LanguageModelFF.START_SYM_ID = Vocabulary.id(Vocabulary.START_SYM);
    LanguageModelFF.STOP_SYM_ID = Vocabulary.id(Vocabulary.STOP_SYM);
  }

  public NGramLanguageModel getLM() {
    return this.languageModel;
  }

  public String logString() {
    if (languageModel != null)
      return String.format("%s, order %d (weight %.3f)", name, languageModel.getOrder(), weight);
    else
      return "WHOA";
  }

  /**
   * Computes the features incurred along this edge. Note that these features are unweighted costs
   * of the feature; they are the feature cost, not the model cost, or the inner product of them.
   */
  @Override
  public DPState compute(Rule rule, List<HGNode> tailNodes, int i, int j, SourcePath sourcePath,
      Sentence sentence, Accumulator acc) {

    NgramDPState newState = null;
    if (rule != null) {
      if (config.source_annotations) {
        // Get source side annotations and project them to the target side
        newState = computeTransition(getTags(rule, i, j, sentence), tailNodes, acc);
      }
      else {
        if (this.isClassLM) {
          // Use a class language model
          // Return target side classes
          newState = computeTransition(getClasses(rule), tailNodes, acc);
        }
        else {
          // Default LM
          newState = computeTransition(rule.getEnglish(), tailNodes, acc);
        }
      }

    }

    return newState;
  }

  /**
   * Input sentences can be tagged with information specific to the language model. This looks for
   * such annotations by following a word's alignments back to the source words, checking for
   * annotations, and replacing the surface word if such annotations are found.
   *
   */
  protected int[] getTags(Rule rule, int begin, int end, Sentence sentence) {
    /* Very important to make a copy here, so the original rule is not modified */
    int[] tokens = Arrays.copyOf(rule.getEnglish(), rule.getEnglish().length);
    byte[] alignments = rule.getAlignment();

    //    System.err.println(String.format("getTags() %s", rule.getRuleString()));

    /* For each target-side token, project it to each of its source-language alignments. If any of those
     * are annotated, take the first annotation and quit.
     */

    if (alignments != null) {
      for (int i = 0; i < tokens.length; i++) {
        if (tokens[i] > 0) { // skip nonterminals
          for (int j = 0; j < alignments.length; j += 2) {
            if (alignments[j] == i) {
              // NOTE(review): this looks up the annotation with alignments[i], but i
              // indexes target tokens while the matching source position for this
              // alignment pair is presumably alignments[j + 1] — looks like an
              // indexing bug; confirm against the byte layout of Rule.getAlignment().
              String annotation = sentence.getAnnotation((int)alignments[i] + begin, "class");
              if (annotation != null) {
                //                System.err.println(String.format("  word %d source %d abs %d annotation %d/%s",
                //                    i, alignments[i], alignments[i] + begin, annotation, Vocabulary.word(annotation)));
                tokens[i] = Vocabulary.id(annotation);
                break;
              }
            }
          }
        }
      }
    }

    return tokens;
  }

  /**
   * Sets the class map if this is a class LM
   * @param classMap
   * @throws IOException
   */
  public void setClassMap(String fileName) throws IOException {
    this.classMap = new ClassMap(fileName);
  }

  /**
   * Replace each word in a rule with the target side classes.
   */
  protected int[] getClasses(Rule rule) {
    if (this.classMap == null) {
      System.err.println("The class map is not set. Cannot use the class LM ");
      System.exit(2);
    }
    /* Very important to make a copy here, so the original rule is not modified */
    int[] tokens = Arrays.copyOf(rule.getEnglish(), rule.getEnglish().length);
    for (int i = 0; i < tokens.length; i++) {
      if (tokens[i] > 0 ) {
        tokens[i] = this.classMap.getClassID(tokens[i]);
      }
    }
    return tokens;
  }

  @Override
  public DPState computeFinal(HGNode tailNode, int i, int j, SourcePath sourcePath, Sentence sentence,
      Accumulator acc) {
    return computeFinalTransition((NgramDPState) tailNode.getDPState(stateIndex), acc);
  }

  /**
   * This function computes all the complete n-grams found in the rule, as well as the incomplete
   * n-grams on the left-hand side.
   */
  @Override
  public float estimateCost(Rule rule, Sentence sentence) {

    float estimate = 0.0f;
    boolean considerIncompleteNgrams = true;

    int[] enWords = rule.getEnglish();

    List<Integer> words = new ArrayList<Integer>();
    // only skip the start-symbol bonus when the rule itself begins with <s>
    boolean skipStart = (enWords[0] == START_SYM_ID);

    /*
     * Move through the words, accumulating language model costs each time we have an n-gram (n >=
     * 2), and resetting the series of words when we hit a nonterminal.
     */
    for (int c = 0; c < enWords.length; c++) {
      int currentWord = enWords[c];
      if (Vocabulary.nt(currentWord)) {
        estimate += scoreChunkLogP(words, considerIncompleteNgrams, skipStart);
        words.clear();
        skipStart = false;
      } else {
        words.add(currentWord);
      }
    }
    estimate += scoreChunkLogP(words, considerIncompleteNgrams, skipStart);

    return weight * estimate;
  }

  /**
   * Estimates the future cost of a rule. For the language model feature, this is the sum of the
   * costs of the leftmost k-grams, k = [1..n-1].
   */
  @Override
  public float estimateFutureCost(Rule rule, DPState currentState, Sentence sentence) {
    NgramDPState state = (NgramDPState) currentState;

    float estimate = 0.0f;
    int[] leftContext = state.getLeftLMStateWords();

    if (null != leftContext) {
      List<Integer> words = new ArrayList<Integer>();
      for (int w : leftContext)
        words.add(w);

      boolean considerIncompleteNgrams = true;
      boolean skipStart = true;
      if (words.get(0) != START_SYM_ID) {
        skipStart = false;
      }
      estimate += scoreChunkLogP(words, considerIncompleteNgrams, skipStart);
    }

    return weight * estimate;
  }

  /**
   * Compute the cost of a rule application. The cost of applying a rule is computed by determining
   * the n-gram costs for all n-grams created by this rule application, and summing them. N-grams
   * are created when (a) terminal words in the rule string are followed by a nonterminal (b)
   * terminal words in the rule string are preceded by a nonterminal (c) we encounter adjacent
   * nonterminals. In all of these situations, the corresponding boundary words of the node in the
   * hypergraph represented by the nonterminal must be retrieved.
   *
   * IMPORTANT: only complete n-grams are scored. This means that hypotheses with fewer words
   * than the complete n-gram state remain *unscored*. This fact adds a lot of complication to the
   * code, including the use of the computeFinal* family of functions, which correct this fact for
   * sentences that are too short on the final transition.
   */
  private NgramDPState computeTransition(int[] enWords, List<HGNode> tailNodes, Accumulator acc) {

    // 'current' acts as a sliding window of at most ngramOrder words; 'shadow'
    // is scratch space used to shift the window without reallocating.
    int[] current = new int[this.ngramOrder];
    int[] shadow = new int[this.ngramOrder];
    int ccount = 0;
    float transitionLogP = 0.0f;
    int[] left_context = null;

    for (int c = 0; c < enWords.length; c++) {
      int curID = enWords[c];

      if (Vocabulary.nt(curID)) {
        // Nonterminal: splice in the antecedent node's boundary LM state.
        int index = -(curID + 1);

        NgramDPState state = (NgramDPState) tailNodes.get(index).getDPState(stateIndex);
        int[] left = state.getLeftLMStateWords();
        int[] right = state.getRightLMStateWords();

        // Left context.
        for (int i = 0; i < left.length; i++) {
          current[ccount++] = left[i];

          if (left_context == null && ccount == this.ngramOrder - 1)
            left_context = Arrays.copyOf(current, ccount);

          if (ccount == this.ngramOrder) {
            // Compute the current word probability, and remove it.
            float prob = this.languageModel.ngramLogProbability(current, this.ngramOrder);
            //            System.err.println(String.format("-> prob(%s) = %f", Vocabulary.getWords(current), prob));
            transitionLogP += prob;
            System.arraycopy(current, 1, shadow, 0, this.ngramOrder - 1);
            int[] tmp = current;
            current = shadow;
            shadow = tmp;
            --ccount;
          }
        }
        System.arraycopy(right, 0, current, ccount - right.length, right.length);
      } else { // terminal words
        current[ccount++] = curID;

        if (left_context == null && ccount == this.ngramOrder - 1)
          left_context = Arrays.copyOf(current, ccount);

        if (ccount == this.ngramOrder) {
          // Compute the current word probability, and remove it.s
          float prob = this.languageModel.ngramLogProbability(current, this.ngramOrder);
          //          System.err.println(String.format("-> prob(%s) = %f", Vocabulary.getWords(current), prob));
          transitionLogP += prob;
          System.arraycopy(current, 1, shadow, 0, this.ngramOrder - 1);
          int[] tmp = current;
          current = shadow;
          shadow = tmp;
          --ccount;
        }
      }
    }
    //    acc.add(name, transitionLogP);
    acc.add(denseFeatureIndex, transitionLogP);

    if (left_context != null) {
      return new NgramDPState(left_context, Arrays.copyOfRange(current, ccount - this.ngramOrder
          + 1, ccount));
    } else {
      int[] context = Arrays.copyOf(current, ccount);
      return new NgramDPState(context, context);
    }
  }

  /**
   * This function differs from regular transitions because we incorporate the cost of incomplete
   * left-hand ngrams, as well as including the start- and end-of-sentence markers (if they were
   * requested when the object was created).
   *
   * @param state the dynamic programming state
   * @return the final transition probability (including incomplete n-grams)
   */
  private NgramDPState computeFinalTransition(NgramDPState state, Accumulator acc) {

    //    System.err.println(String.format("LanguageModel::computeFinalTransition()"));

    float res = 0.0f;
    LinkedList<Integer> currentNgram = new LinkedList<Integer>();
    int[] leftContext = state.getLeftLMStateWords();
    int[] rightContext = state.getRightLMStateWords();

    for (int i = 0; i < leftContext.length; i++) {
      int t = leftContext[i];
      currentNgram.add(t);

      if (currentNgram.size() >= 2) { // start from bigram
        float prob = this.languageModel.ngramLogProbability(Support.toArray(currentNgram),
            currentNgram.size());
        res += prob;
      }
      if (currentNgram.size() == this.ngramOrder)
        currentNgram.removeFirst();
    }

    // Tell the accumulator
    //    acc.add(name, res);
    acc.add(denseFeatureIndex, res);

    // State is the same
    return new NgramDPState(leftContext, rightContext);
  }

  /**
   * This function is basically a wrapper for NGramLanguageModel::sentenceLogProbability(). It
   * computes the probability of a phrase ("chunk"), using lower-order n-grams for the first n-1
   * words.
   *
   * @param words
   * @param considerIncompleteNgrams
   * @param skipStart
   * @return the phrase log probability
   */
  private float scoreChunkLogP(List<Integer> words, boolean considerIncompleteNgrams,
      boolean skipStart) {

    float score = 0.0f;
    if (words.size() > 0) {
      int startIndex;
      if (!considerIncompleteNgrams) {
        startIndex = this.ngramOrder;
      } else if (skipStart) {
        startIndex = 2;
      } else {
        startIndex = 1;
      }
      score = this.languageModel.sentenceLogProbability(
          Support.subIntArray(words, 0, words.size()), this.ngramOrder, startIndex);
    }

    return score;
  }
}
src/joshua/decoder/ff/lm/LanguageModelFF.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package joshua.decoder.ff.lm; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import joshua.corpus.Vocabulary; import joshua.decoder.JoshuaConfiguration; import joshua.decoder.Support; import joshua.decoder.chart_parser.SourcePath; import joshua.decoder.ff.FeatureVector; import joshua.decoder.ff.StatefulFF; import joshua.decoder.ff.lm.berkeley_lm.LMGrammarBerkeley; import joshua.decoder.ff.lm.KenLM; import joshua.decoder.ff.state_maintenance.DPState; import joshua.decoder.ff.state_maintenance.NgramDPState; import joshua.decoder.ff.tm.Rule; import joshua.decoder.hypergraph.HGNode; import joshua.decoder.segment_file.Sentence; /** * This class performs the following: * <ol> * <li>Gets the additional LM score due to combinations of small items into larger ones by using * rules * <li>Gets the LM state * <li>Gets the left-side LM state estimation score * </ol> * * @author Matt Post <[email protected]> * @author Juri Ganitkevitch <[email protected]> * @author Zhifei Li, <[email protected]> */ public class LanguageModelFF extends StatefulFF { private static int LM_INDEX = 0; public static int 
START_SYM_ID; public static int STOP_SYM_ID; /** * N-gram language model. We assume the language model is in ARPA format for equivalent state: * * <ol> * <li>We assume it is a backoff lm, and high-order ngram implies low-order ngram; absense of * low-order ngram implies high-order ngram</li> * <li>For a ngram, existence of backoffweight => existence a probability Two ways of dealing with * low counts: * <ul> * <li>SRILM: don't multiply zeros in for unknown words</li> * <li>Pharaoh: cap at a minimum score exp(-10), including unknown words</li> * </ul> * </li> */ protected NGramLanguageModel languageModel; /** * We always use this order of ngram, though the LMGrammar may provide higher order probability. */ protected final int ngramOrder; /* * We cache the weight of the feature since there is only one. */ protected float weight; protected String type; protected String path; /* Whether this is a class-based LM */ private boolean isClassLM; private ClassMap classMap; protected class ClassMap { private final int OOV_id = 10; private HashMap<Integer, Integer> classMap; public ClassMap(String file_name) throws IOException { this.classMap = new HashMap<Integer, Integer>(); read(file_name); } public int getClassID(int wordID) { if (this.classMap.containsKey(wordID)) { return this.classMap.get(wordID); } else { return OOV_id; } } /** * Reads a class map from file. 
* * @param file_name * @throws IOException */ private void read(String file_name) throws IOException { int lineno = 0; for (String line: new joshua.util.io.LineReader(file_name, false)) { lineno++; String[] lineComp = line.trim().split("\\s+"); try { this.classMap.put(Vocabulary.id(lineComp[0]), Vocabulary.id(lineComp[1])); } catch (java.lang.ArrayIndexOutOfBoundsException e) { System.err.println(String.format("* WARNING: bad vocab line #%d '%s'", lineno, line)); } } } } public LanguageModelFF(FeatureVector weights, String[] args, JoshuaConfiguration config) { super(weights, String.format("lm_%d", LanguageModelFF.LM_INDEX++), args, config); this.type = parsedArgs.get("lm_type"); this.ngramOrder = Integer.parseInt(parsedArgs.get("lm_order")); this.path = parsedArgs.get("lm_file"); if (parsedArgs.containsKey("class_map")) try { this.isClassLM = true; this.classMap = new ClassMap(parsedArgs.get("class_map")); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } // The dense feature initialization hasn't happened yet, so we have to retrieve this as sparse this.weight = weights.getSparse(name); initializeLM(); } @Override public ArrayList<String> reportDenseFeatures(int index) { denseFeatureIndex = index; ArrayList<String> names = new ArrayList<String>(); names.add(name); return names; } /** * Initializes the underlying language model. 
* * @param config * @param type * @param path */ public void initializeLM() { if (type.equals("kenlm")) { this.languageModel = new KenLM(ngramOrder, path); } else if (type.equals("berkeleylm")) { this.languageModel = new LMGrammarBerkeley(ngramOrder, path); } else { System.err.println(String.format("* FATAL: Invalid backend lm_type '%s' for LanguageModel", type)); System.err.println(String.format("* Permissible values for 'lm_type' are 'kenlm' and 'berkeleylm'")); System.exit(-1); } Vocabulary.registerLanguageModel(this.languageModel); Vocabulary.id(config.default_non_terminal); LanguageModelFF.START_SYM_ID = Vocabulary.id(Vocabulary.START_SYM); LanguageModelFF.STOP_SYM_ID = Vocabulary.id(Vocabulary.STOP_SYM); } public NGramLanguageModel getLM() { return this.languageModel; } public String logString() { if (languageModel != null) return String.format("%s, order %d (weight %.3f)", name, languageModel.getOrder(), weight); else return "WHOA"; } /** * Computes the features incurred along this edge. Note that these features are unweighted costs * of the feature; they are the feature cost, not the model cost, or the inner product of them. */ @Override public DPState compute(Rule rule, List<HGNode> tailNodes, int i, int j, SourcePath sourcePath, Sentence sentence, Accumulator acc) { NgramDPState newState = null; if (rule != null) { if (config.source_annotations) { // Get source side annotations and project them to the target side newState = computeTransition(getTags(rule, i, j, sentence), tailNodes, acc); } else { if (this.isClassLM) { // Use a class language model // Return target side classes newState = computeTransition(getClasses(rule), tailNodes, acc); } else { // Default LM newState = computeTransition(rule.getEnglish(), tailNodes, acc); } } } return newState; } /** * Input sentences can be tagged with information specific to the language model. 
This looks for * such annotations by following a word's alignments back to the source words, checking for * annotations, and replacing the surface word if such annotations are found. * */ protected int[] getTags(Rule rule, int begin, int end, Sentence sentence) { /* Very important to make a copy here, so the original rule is not modified */ int[] tokens = Arrays.copyOf(rule.getEnglish(), rule.getEnglish().length); byte[] alignments = rule.getAlignment(); // System.err.println(String.format("getTags() %s", rule.getRuleString())); /* For each target-side token, project it to each of its source-language alignments. If any of those * are annotated, take the first annotation and quit. */ if (alignments != null) { for (int i = 0; i < tokens.length; i++) { if (tokens[i] > 0) { // skip nonterminals for (int j = 0; j < alignments.length; j += 2) { if (alignments[j] == i) { String annotation = sentence.getAnnotation((int)alignments[i] + begin, "class"); if (annotation != null) { // System.err.println(String.format(" word %d source %d abs %d annotation %d/%s", // i, alignments[i], alignments[i] + begin, annotation, Vocabulary.word(annotation))); tokens[i] = Vocabulary.id(annotation); break; } } } } } } return tokens; } /** * Sets the class map if this is a class LM * @param classMap * @throws IOException */ public void setClassMap(String fileName) throws IOException { this.classMap = new ClassMap(fileName); } /** * Replace each word in a rule with the target side classes. */ protected int[] getClasses(Rule rule) { if (this.classMap == null) { System.err.println("The class map is not set. 
Cannot use the class LM "); System.exit(2); } /* Very important to make a copy here, so the original rule is not modified */ int[] tokens = Arrays.copyOf(rule.getEnglish(), rule.getEnglish().length); for (int i = 0; i < tokens.length; i++) { if (tokens[i] > 0 ) { tokens[i] = this.classMap.getClassID(tokens[i]); } } return tokens; } @Override public DPState computeFinal(HGNode tailNode, int i, int j, SourcePath sourcePath, Sentence sentence, Accumulator acc) { return computeFinalTransition((NgramDPState) tailNode.getDPState(stateIndex), acc); } /** * This function computes all the complete n-grams found in the rule, as well as the incomplete * n-grams on the left-hand side. */ @Override public float estimateCost(Rule rule, Sentence sentence) { float estimate = 0.0f; boolean considerIncompleteNgrams = true; int[] enWords = rule.getEnglish(); List<Integer> words = new ArrayList<Integer>(); boolean skipStart = (enWords[0] == START_SYM_ID); /* * Move through the words, accumulating language model costs each time we have an n-gram (n >= * 2), and resetting the series of words when we hit a nonterminal. */ for (int c = 0; c < enWords.length; c++) { int currentWord = enWords[c]; if (Vocabulary.nt(currentWord)) { estimate += scoreChunkLogP(words, considerIncompleteNgrams, skipStart); words.clear(); skipStart = false; } else { words.add(currentWord); } } estimate += scoreChunkLogP(words, considerIncompleteNgrams, skipStart); return weight * estimate; } /** * Estimates the future cost of a rule. For the language model feature, this is the sum of the * costs of the leftmost k-grams, k = [1..n-1]. 
*/ @Override public float estimateFutureCost(Rule rule, DPState currentState, Sentence sentence) { NgramDPState state = (NgramDPState) currentState; float estimate = 0.0f; int[] leftContext = state.getLeftLMStateWords(); if (null != leftContext) { List<Integer> words = new ArrayList<Integer>(); for (int w : leftContext) words.add(w); boolean considerIncompleteNgrams = true; boolean skipStart = true; if (words.get(0) != START_SYM_ID) { skipStart = false; } estimate += scoreChunkLogP(words, considerIncompleteNgrams, skipStart); } return weight * estimate; } /** * Compute the cost of a rule application. The cost of applying a rule is computed by determining * the n-gram costs for all n-grams created by this rule application, and summing them. N-grams * are created when (a) terminal words in the rule string are followed by a nonterminal (b) * terminal words in the rule string are preceded by a nonterminal (c) we encounter adjacent * nonterminals. In all of these situations, the corresponding boundary words of the node in the * hypergraph represented by the nonterminal must be retrieved. * * IMPORTANT: only complete n-grams are scored. This means that hypotheses with fewer words * than the complete n-gram state remain *unscored*. This fact adds a lot of complication to the * code, including the use of the computeFinal* family of functions, which correct this fact for * sentences that are too short on the final transition. 
*/ private NgramDPState computeTransition(int[] enWords, List<HGNode> tailNodes, Accumulator acc) { int[] current = new int[this.ngramOrder]; int[] shadow = new int[this.ngramOrder]; int ccount = 0; float transitionLogP = 0.0f; int[] left_context = null; for (int c = 0; c < enWords.length; c++) { int curID = enWords[c]; if (Vocabulary.nt(curID)) { int index = -(curID + 1); NgramDPState state = (NgramDPState) tailNodes.get(index).getDPState(stateIndex); int[] left = state.getLeftLMStateWords(); int[] right = state.getRightLMStateWords(); // Left context. for (int i = 0; i < left.length; i++) { current[ccount++] = left[i]; if (left_context == null && ccount == this.ngramOrder - 1) left_context = Arrays.copyOf(current, ccount); if (ccount == this.ngramOrder) { // Compute the current word probability, and remove it. float prob = this.languageModel.ngramLogProbability(current, this.ngramOrder); // System.err.println(String.format("-> prob(%s) = %f", Vocabulary.getWords(current), prob)); transitionLogP += prob; System.arraycopy(current, 1, shadow, 0, this.ngramOrder - 1); int[] tmp = current; current = shadow; shadow = tmp; --ccount; } } System.arraycopy(right, 0, current, ccount - right.length, right.length); } else { // terminal words current[ccount++] = curID; if (left_context == null && ccount == this.ngramOrder - 1) left_context = Arrays.copyOf(current, ccount); if (ccount == this.ngramOrder) { // Compute the current word probability, and remove it.s float prob = this.languageModel.ngramLogProbability(current, this.ngramOrder); // System.err.println(String.format("-> prob(%s) = %f", Vocabulary.getWords(current), prob)); transitionLogP += prob; System.arraycopy(current, 1, shadow, 0, this.ngramOrder - 1); int[] tmp = current; current = shadow; shadow = tmp; --ccount; } } } // acc.add(name, transitionLogP); acc.add(denseFeatureIndex, transitionLogP); if (left_context != null) { return new NgramDPState(left_context, Arrays.copyOfRange(current, ccount - this.ngramOrder 
+ 1, ccount)); } else { int[] context = Arrays.copyOf(current, ccount); return new NgramDPState(context, context); } } /** * This function differs from regular transitions because we incorporate the cost of incomplete * left-hand ngrams, as well as including the start- and end-of-sentence markers (if they were * requested when the object was created). * * @param state the dynamic programming state * @return the final transition probability (including incomplete n-grams) */ private NgramDPState computeFinalTransition(NgramDPState state, Accumulator acc) { // System.err.println(String.format("LanguageModel::computeFinalTransition()")); float res = 0.0f; LinkedList<Integer> currentNgram = new LinkedList<Integer>(); int[] leftContext = state.getLeftLMStateWords(); int[] rightContext = state.getRightLMStateWords(); for (int i = 0; i < leftContext.length; i++) { int t = leftContext[i]; currentNgram.add(t); if (currentNgram.size() >= 2) { // start from bigram float prob = this.languageModel.ngramLogProbability(Support.toArray(currentNgram), currentNgram.size()); res += prob; } if (currentNgram.size() == this.ngramOrder) currentNgram.removeFirst(); } // Tell the accumulator // acc.add(name, res); acc.add(denseFeatureIndex, res); // State is the same return new NgramDPState(leftContext, rightContext); } /** * This function is basically a wrapper for NGramLanguageModel::sentenceLogProbability(). It * computes the probability of a phrase ("chunk"), using lower-order n-grams for the first n-1 * words. 
* * @param words * @param considerIncompleteNgrams * @param skipStart * @return the phrase log probability */ private float scoreChunkLogP(List<Integer> words, boolean considerIncompleteNgrams, boolean skipStart) { float score = 0.0f; if (words.size() > 0) { int startIndex; if (!considerIncompleteNgrams) { startIndex = this.ngramOrder; } else if (skipStart) { startIndex = 2; } else { startIndex = 1; } score = this.languageModel.sentenceLogProbability( Support.subIntArray(words, 0, words.size()), this.ngramOrder, startIndex); } return score; } }
OOV fix for class-based LM
src/joshua/decoder/ff/lm/LanguageModelFF.java
OOV fix for class-based LM
Java
apache-2.0
fd08c25d432063289afb9a5f9d42357ee14ec3b2
0
yschimke/oksocial,yschimke/oksocial
package com.baulsupp.oksocial; import com.baulsupp.oksocial.apidocs.ServiceApiDocPresenter; import com.baulsupp.oksocial.authenticator.AuthInterceptor; import com.baulsupp.oksocial.authenticator.Authorisation; import com.baulsupp.oksocial.authenticator.PrintCredentials; import com.baulsupp.oksocial.authenticator.ServiceInterceptor; import com.baulsupp.oksocial.commands.CommandRegistry; import com.baulsupp.oksocial.commands.MainAware; import com.baulsupp.oksocial.commands.OksocialCommand; import com.baulsupp.oksocial.commands.ShellCommand; import com.baulsupp.oksocial.completion.ArgumentCompleter; import com.baulsupp.oksocial.completion.CompletionVariableCache; import com.baulsupp.oksocial.completion.TmpCompletionVariableCache; import com.baulsupp.oksocial.completion.UrlCompleter; import com.baulsupp.oksocial.completion.UrlList; import com.baulsupp.oksocial.credentials.CredentialsStore; import com.baulsupp.oksocial.credentials.FixedTokenCredentialsStore; import com.baulsupp.oksocial.credentials.OSXCredentialsStore; import com.baulsupp.oksocial.credentials.PreferencesCredentialsStore; import com.baulsupp.oksocial.jjs.JavascriptApiCommand; import com.baulsupp.oksocial.location.BestLocation; import com.baulsupp.oksocial.location.LocationSource; import com.baulsupp.oksocial.network.DnsMode; import com.baulsupp.oksocial.network.DnsOverride; import com.baulsupp.oksocial.network.DnsSelector; import com.baulsupp.oksocial.network.IPvMode; import com.baulsupp.oksocial.network.InterfaceSocketFactory; import com.baulsupp.oksocial.network.NettyDns; import com.baulsupp.oksocial.okhttp.OkHttpResponseFuture; import com.baulsupp.oksocial.output.ConsoleHandler; import com.baulsupp.oksocial.output.DownloadHandler; import com.baulsupp.oksocial.output.OutputHandler; import com.baulsupp.oksocial.security.CertificatePin; import com.baulsupp.oksocial.security.CertificateUtils; import com.baulsupp.oksocial.security.ConsoleCallbackHandler; import 
com.baulsupp.oksocial.security.InsecureHostnameVerifier; import com.baulsupp.oksocial.security.InsecureTrustManager; import com.baulsupp.oksocial.security.OpenSCUtil; import com.baulsupp.oksocial.services.twitter.TwitterCachingInterceptor; import com.baulsupp.oksocial.services.twitter.TwitterDeflatedResponseInterceptor; import com.baulsupp.oksocial.util.FileContent; import com.baulsupp.oksocial.util.InetAddressParam; import com.baulsupp.oksocial.util.LoggingUtil; import com.baulsupp.oksocial.util.ProtocolUtil; import com.baulsupp.oksocial.util.UsageException; import com.baulsupp.oksocial.util.Util; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.mcdermottroe.apple.OSXKeychainException; import com.moczul.ok2curl.CurlInterceptor; import io.airlift.airline.Arguments; import io.airlift.airline.Command; import io.airlift.airline.HelpOption; import io.airlift.airline.Option; import io.airlift.airline.SingleCommand; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.util.concurrent.DefaultThreadFactory; import java.io.File; import java.io.IOException; import java.net.Proxy; import java.net.SocketException; import java.security.KeyStore; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; import javax.net.SocketFactory; import javax.net.ssl.KeyManager; import javax.net.ssl.X509TrustManager; import okhttp3.Cache; import okhttp3.Call; import okhttp3.Dns; import okhttp3.HttpUrl; import okhttp3.MediaType; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; import okhttp3.logging.HttpLoggingInterceptor; import static 
com.baulsupp.oksocial.security.CertificateUtils.trustManagerForKeyStore; import static com.baulsupp.oksocial.security.KeystoreUtils.createKeyManager; import static com.baulsupp.oksocial.security.KeystoreUtils.createSslSocketFactory; import static com.baulsupp.oksocial.security.KeystoreUtils.getKeyStore; import static com.baulsupp.oksocial.security.KeystoreUtils.keyManagerArray; import static java.util.Arrays.asList; import static java.util.Optional.empty; import static java.util.Optional.ofNullable; import static java.util.concurrent.TimeUnit.SECONDS; import static java.util.stream.Collectors.joining; @SuppressWarnings({"WeakerAccess", "CanBeFinal", "unused"}) @Command(name = Main.NAME, description = "A curl for social apis.") public class Main extends HelpOption implements Runnable { private static Logger logger = Logger.getLogger(Main.class.getName()); static final String NAME = "oksocial"; private static Main fromArgs(String... args) { return SingleCommand.singleCommand(Main.class).parse(args); } public static void main(String... 
args) { fromArgs(args).run(); } @Option(name = {"-X", "--request"}, description = "Specify request command to use") public String method; @Option(name = {"-d", "--data"}, description = "HTTP POST data") public String data; @Option(name = {"-H", "--header"}, description = "Custom header to pass to server") public List<String> headers; @Option(name = {"-A", "--user-agent"}, description = "User-Agent to send to server") public String userAgent = NAME + "/" + versionString(); @Option(name = "--connect-timeout", description = "Maximum time allowed for connection (seconds)") public Integer connectTimeout; @Option(name = "--read-timeout", description = "Maximum time allowed for reading data (seconds)") public Integer readTimeout; @Option(name = {"--no-follow"}, description = "Follow redirects") public boolean dontFollowRedirects = false; @Option(name = {"-k", "--insecure"}, description = "Allow connections to SSL sites without certs") public boolean allowInsecure = false; @Option(name = {"-i", "--include"}, description = "Include protocol headers in the output") public boolean showHeaders = false; @Option(name = "--frames", description = "Log HTTP/2 frames to STDERR") public boolean showHttp2Frames = false; @Option(name = "--debug", description = "Debug") public boolean debug = false; @Option(name = {"-e", "--referer"}, description = "Referer URL") public String referer; @Option(name = {"-V", "--version"}, description = "Show version number and quit") public boolean version = false; @Option(name = {"--cache"}, description = "Cache directory") public File cacheDirectory = null; @Option(name = {"--protocols"}, description = "Protocols") public String protocols; @Option(name = {"-o", "--output"}, description = "Output file/directory") public File outputDirectory; @Option(name = {"--authorize"}, description = "Authorize API") public boolean authorize; @Option(name = {"--renew"}, description = "Renew API Authorization") public boolean renew; @Option(name = {"--token"}, 
description = "Use existing Token for authorization") public String token; @Option(name = {"--curl"}, description = "Show curl commands") public boolean curl = false; @Option(name = {"--ip"}, description = "IP Preferences (system, ipv4, ipv6, ipv4only, ipv6only)", allowedValues = {"system", "ipv4", "ipv6", "ipv4only", "ipv6only"}) public IPvMode ipMode = IPvMode.SYSTEM; @Option(name = {"--dns"}, description = "DNS (netty, java)", allowedValues = {"java", "netty"}) public DnsMode dnsMode = DnsMode.NETTY; @Option(name = {"--dnsServers"}, description = "Specific DNS Servers (csv, google)") public String dnsServers = null; @Option(name = {"--resolve"}, description = "DNS Overrides (HOST:TARGET)") public List<String> resolve = null; @Option(name = {"--certificatePin"}, description = "Specific Local Network Interface") public List<CertificatePin> certificatePins = null; @Option(name = {"--networkInterface"}, description = "Specific Local Network Interface") public String networkInterface = null; @Option(name = {"--clientauth"}, description = "Use Client Authentication (from keystore)") public boolean clientAuth = false; @Option(name = {"--keystore"}, description = "Keystore") public File keystoreFile = null; @Option(name = {"--cert"}, description = "Use given server cert (Root CA)") public List<File> serverCerts = Lists.newArrayList(); @Option(name = {"--opensc"}, description = "Send OpenSC Client Certificate (slot)") public Integer opensc; @Option(name = {"--socks"}, description = "Use SOCKS proxy") public InetAddressParam socksProxy; @Option(name = {"--proxy"}, description = "Use HTTP proxy") public InetAddressParam proxy; @Option(name = {"--show-credentials"}, description = "Show Credentials") public boolean showCredentials = false; @Option(name = {"--alias-names"}, description = "Show Alias Names") public boolean aliasNames = false; @Option(name = {"-r", "--raw"}, description = "Raw Output") public boolean rawOutput = false; @Option(name = {"-s", "--set"}, 
description = "Token Set e.g. work") public String tokenSet = null; @Option(name = {"--serviceNames"}, description = "Service Names") public boolean serviceNames = false; @Option(name = {"--urlCompletion"}, description = "URL Completion") public boolean urlComplete; @Option(name = {"--apidoc"}, description = "API Documentation") public boolean apiDoc; @Option(name = {"--ssldebug"}, description = "SSL Debug") public boolean sslDebug; public String commandName = System.getProperty("command.name", "oksocial"); public String completionFile = System.getenv("COMPLETION_FILE"); @Arguments(title = "arguments", description = "Remote resource URLs") public List<String> arguments = new ArrayList<>(); public ServiceInterceptor serviceInterceptor = null; private Authorisation authorisation; public OkHttpClient client = null; public Request.Builder requestBuilder; public CommandRegistry commandRegistry = new CommandRegistry(); public OutputHandler outputHandler = null; public CredentialsStore credentialsStore = null; public CompletionVariableCache completionVariableCache; public LocationSource locationSource = new BestLocation(); private NioEventLoopGroup eventLoopGroup; private String versionString() { return Util.versionString("/oksocial-version.properties"); } @Override public void run() { if (sslDebug) { System.setProperty("javax.net.debug", "ssl,handshake"); } LoggingUtil.configureLogging(debug, showHttp2Frames); if (outputHandler == null) { outputHandler = buildHandler(); } if (showHelpIfRequested()) { return; } try { if (version) { outputHandler.info(NAME + " " + versionString()); return; } initialise(); if (showCredentials) { new PrintCredentials(client, credentialsStore, outputHandler, serviceInterceptor).showCredentials(arguments, this::createRequestBuilder); return; } if (aliasNames) { printAliasNames(); return; } if (serviceNames) { outputHandler.info(serviceInterceptor.names().stream().collect(joining(" "))); return; } if (urlComplete) { 
outputHandler.info(urlCompletionList()); return; } if (apiDoc) { showApiDocs(); return; } if (authorize) { authorize(); return; } if (renew) { renew(); return; } executeRequests(outputHandler); } catch (Exception e) { outputHandler.showError("unknown error", e); } finally { closeClients(); } } private void showApiDocs() throws Exception { ServiceApiDocPresenter docs = new ServiceApiDocPresenter(serviceInterceptor, client, credentialsStore); getFullCompletionUrl().ifPresent(u -> { try { docs.explainApi(u, outputHandler, client); } catch (IOException e) { throw Throwables.propagate(e); } }); } // TODO refactor this mess out of Main private String urlCompletionList() throws Exception { ShellCommand command = getShellCommand(); Optional<ArgumentCompleter> commandCompletor = command.completer(); if (commandCompletor.isPresent()) { UrlList urls = commandCompletion(commandCompletor.get(), arguments); String prefix = arguments.get(arguments.size() - 1); if (completionFile != null) { urls.toFile(new File(completionFile), 0, prefix); } return urls.getUrls(prefix).stream().collect(joining("\n")); } ArgumentCompleter completer = new UrlCompleter(serviceInterceptor.services(), client, credentialsStore, completionVariableCache); Optional<String> fullCompletionUrlOpt = getFullCompletionUrl(); // reload hack (in case changed for "" case) String originalCompletionUrl = arguments.get(arguments.size() - 1); if (fullCompletionUrlOpt.isPresent()) { String fullCompletionUrl = fullCompletionUrlOpt.get(); UrlList urls = completer.urlList(fullCompletionUrl); final int strip; if (!fullCompletionUrl.equals(originalCompletionUrl)) { strip = fullCompletionUrl.length() - originalCompletionUrl.length(); } else { strip = 0; } if (completionFile != null) { urls.toFile(new File(completionFile), strip, originalCompletionUrl); } return urls.getUrls(fullCompletionUrl).stream() .map(u -> u.substring(strip)) .collect(joining("\n")); } else { return ""; } } private UrlList 
commandCompletion(ArgumentCompleter urlCompleter, List<String> arguments) throws IOException { return urlCompleter.urlList(arguments.get(arguments.size() - 1)); } /* * The last url in arguments which should be used for completion or apidoc requests. * In the case of javascript command expansion, it is expanded first before * being returned. * * n.b. arguments may be modified by this call. */ private Optional<String> getFullCompletionUrl() throws Exception { if (arguments.isEmpty()) { return empty(); } String urlToComplete = arguments.get(arguments.size() - 1); ShellCommand command = getShellCommand(); if (command instanceof JavascriptApiCommand) { List<Request> requests = command.buildRequests(client, requestBuilder, arguments); if (requests.size() > 0) { HttpUrl newUrl = requests.get(0).url(); // support "" -> http://api.test.com if (urlToComplete.isEmpty() && newUrl.encodedPath().equals("/")) { urlToComplete = "/"; arguments.remove(arguments.size() - 1); arguments.add(urlToComplete); } String newUrlCompletion = newUrl.toString(); if (newUrlCompletion.endsWith(urlToComplete)) { return Optional.of(newUrlCompletion); } } } else if (UrlCompleter.isPossibleAddress(urlToComplete)) { return Optional.of(urlToComplete); } return empty(); } public void initialise() throws Exception { if (outputHandler == null) { outputHandler = buildHandler(); } if (credentialsStore == null) { credentialsStore = createCredentialsStore(); } OkHttpClient.Builder clientBuilder = createClientBuilder(); OkHttpClient authClient = clientBuilder.build(); serviceInterceptor = new ServiceInterceptor(authClient, credentialsStore); authorisation = new Authorisation(serviceInterceptor, credentialsStore, authClient, outputHandler); clientBuilder.networkInterceptors().add(0, serviceInterceptor); client = clientBuilder.build(); requestBuilder = createRequestBuilder(); if (completionVariableCache == null) { completionVariableCache = new TmpCompletionVariableCache(); } } public OkHttpClient getClient() { 
return client; } private CredentialsStore createCredentialsStore() throws OSXKeychainException { if (token != null && !authorize) { return new FixedTokenCredentialsStore(token); } if (Util.isOSX()) { return new OSXCredentialsStore(ofNullable(tokenSet)); } else { return new PreferencesCredentialsStore(ofNullable(tokenSet)); } } private void closeClients() { if (client != null) { client.dispatcher().executorService().shutdown(); client.connectionPool().evictAll(); } if (eventLoopGroup != null) { eventLoopGroup.shutdownGracefully(0, 0, TimeUnit.SECONDS); } } private OutputHandler buildHandler() { if (outputDirectory != null) { return new DownloadHandler(outputDirectory); } else if (rawOutput) { return new DownloadHandler(new File("-")); } else { return ConsoleHandler.instance(); } } private void executeRequests(OutputHandler outputHandler) throws Exception { ShellCommand command = getShellCommand(); List<Request> requests = command.buildRequests(client, requestBuilder, arguments); if (!command.handlesRequests()) { if (requests.isEmpty()) { throw new UsageException("no urls specified"); } List<Future<Response>> responseFutures = enqueueRequests(requests, client); processResponses(outputHandler, responseFutures); } } private void processResponses(OutputHandler outputHandler, List<Future<Response>> responseFutures) throws IOException, InterruptedException { boolean failed = false; for (Future<Response> responseFuture : responseFutures) { if (failed) { responseFuture.cancel(true); } else { try (Response response = responseFuture.get()) { outputHandler.showOutput(response, showHeaders); } catch (ExecutionException ee) { outputHandler.showError("request failed", ee.getCause()); failed = true; } } } } private List<Future<Response>> enqueueRequests(List<Request> requests, OkHttpClient client) { List<Future<Response>> responseFutures = Lists.newArrayList(); for (Request request : requests) { logger.log(Level.FINE, "url " + request.url()); if (requests.size() > 1 && !debug) { 
System.err.println(request.url()); } responseFutures.add(makeRequest(client, request)); } return responseFutures; } private ShellCommand getShellCommand() { ShellCommand shellCommand = commandRegistry.getCommandByName(commandName).orElse(new OksocialCommand()); if (shellCommand instanceof MainAware) { ((MainAware) shellCommand).setMain(this); } return shellCommand; } private void printAliasNames() { Set<String> names = Sets.newTreeSet(commandRegistry.names()); names.forEach(outputHandler::info); } private Future<Response> makeRequest(OkHttpClient client, Request request) { logger.log(Level.FINE, "Request " + request); Call call = client.newCall(request); OkHttpResponseFuture result = new OkHttpResponseFuture(); call.enqueue(result); return result.future; } private void authorize() throws Exception { Optional<AuthInterceptor<?>> auth = findAuthInterceptor(); authorisation.authorize(auth, ofNullable(token), arguments); } private void renew() throws Exception { Optional auth = findAuthInterceptor(); authorisation.renew(auth); } private Optional<AuthInterceptor<?>> findAuthInterceptor() throws Exception { ShellCommand command = getShellCommand(); Optional<AuthInterceptor<?>> auth = command.authenticator().flatMap((authName) -> serviceInterceptor.getByName(authName)); if (!auth.isPresent() && !arguments.isEmpty()) { String name = arguments.remove(0); auth = serviceInterceptor.findAuthInterceptor(name); } return auth; } public OkHttpClient.Builder createClientBuilder() throws Exception { OkHttpClient.Builder builder = new OkHttpClient.Builder(); builder.followSslRedirects(!dontFollowRedirects); builder.followRedirects(!dontFollowRedirects); if (connectTimeout != null) { builder.connectTimeout(connectTimeout, SECONDS); } if (readTimeout != null) { builder.readTimeout(readTimeout, SECONDS); } builder.dns(buildDns()); if (networkInterface != null) { builder.socketFactory(getSocketFactory()); } configureTls(builder); if (cacheDirectory != null) { builder.cache(new 
Cache(cacheDirectory, 64 * 1024 * 1024)); } // TODO move behind AuthInterceptor API builder.addNetworkInterceptor(new TwitterCachingInterceptor()); builder.addNetworkInterceptor(new TwitterDeflatedResponseInterceptor()); if (curl) { builder.addNetworkInterceptor(new CurlInterceptor(System.err::println)); } if (debug) { builder.networkInterceptors().add(new HttpLoggingInterceptor(logger::info)); } if (socksProxy != null) { builder.proxy(new Proxy(Proxy.Type.SOCKS, socksProxy.address)); } else if (proxy != null) { builder.proxy(new Proxy(Proxy.Type.HTTP, proxy.address)); } if (protocols != null) { builder.protocols(ProtocolUtil.parseProtocolList(protocols)); } return builder; } private Dns buildDns() { Dns dns; if (dnsMode == DnsMode.NETTY) { dns = NettyDns.byName(ipMode, getEventLoopGroup(), dnsServers); } else { if (dnsServers != null) { throw new UsageException("unable to set dns servers with java DNS"); } dns = new DnsSelector(ipMode); } if (resolve != null) { dns = DnsOverride.build(dns, resolve); } return dns; } private NioEventLoopGroup getEventLoopGroup() { if (eventLoopGroup == null) { ThreadFactory threadFactory = new DefaultThreadFactory("netty", true); eventLoopGroup = new NioEventLoopGroup(1, threadFactory); } return eventLoopGroup; } private SocketFactory getSocketFactory() throws SocketException { Optional<SocketFactory> socketFactory = InterfaceSocketFactory.byName(networkInterface); if (!socketFactory.isPresent()) { throw new UsageException("networkInterface '" + networkInterface + "' not found"); } return socketFactory.get(); } private void configureTls(OkHttpClient.Builder builder) throws Exception { ConsoleCallbackHandler callbackHandler = new ConsoleCallbackHandler(); // possibly null KeyStore keystore = null; if (keystoreFile != null) { keystore = getKeyStore(keystoreFile); } List<KeyManager> keyManagers = Lists.newArrayList(); if (opensc != null) { keyManagers.addAll(asList(OpenSCUtil.getKeyManagers(callbackHandler, opensc))); } else if 
(clientAuth) { if (keystore == null) { throw new UsageException("--clientauth specified without --keystore"); } keyManagers.add(createKeyManager(keystore, callbackHandler)); } X509TrustManager trustManager; if (allowInsecure) { trustManager = new InsecureTrustManager(); builder.hostnameVerifier(new InsecureHostnameVerifier()); } else { List<X509TrustManager> trustManagers = Lists.newArrayList(); if (keystore != null) { trustManagers.add(trustManagerForKeyStore(keystore)); } if (!serverCerts.isEmpty()) { trustManagers.add(CertificateUtils.load(serverCerts)); } trustManager = CertificateUtils.combineTrustManagers(trustManagers); } builder.sslSocketFactory(createSslSocketFactory(keyManagerArray(keyManagers), trustManager), trustManager); if (certificatePins != null) { builder.certificatePinner(CertificatePin.buildFromCommandLine(certificatePins)); } } private String getRequestMethod() { if (method != null) { return method; } if (data != null) { return "POST"; } return "GET"; } private RequestBody getRequestBody() throws IOException { if (data == null) { return null; } String mimeType = "application/x-www-form-urlencoded"; if (headers != null) { for (String header : headers) { String[] parts = header.split(":", -1); if ("Content-Type".equalsIgnoreCase(parts[0])) { mimeType = parts[1].trim(); headers.remove(header); break; } } } return RequestBody.create(MediaType.parse(mimeType), FileContent.readParamBytes(data)); } public Request.Builder createRequestBuilder() throws IOException { Request.Builder requestBuilder = new Request.Builder(); requestBuilder.method(getRequestMethod(), getRequestBody()); if (headers != null) { for (String header : headers) { String[] parts = header.split(":", 2); requestBuilder.header(parts[0], parts[1]); } } if (referer != null) { requestBuilder.header("Referer", referer); } requestBuilder.header("User-Agent", userAgent); return requestBuilder; } }
src/main/java/com/baulsupp/oksocial/Main.java
package com.baulsupp.oksocial; import com.baulsupp.oksocial.apidocs.ServiceApiDocPresenter; import com.baulsupp.oksocial.authenticator.AuthInterceptor; import com.baulsupp.oksocial.authenticator.Authorisation; import com.baulsupp.oksocial.authenticator.PrintCredentials; import com.baulsupp.oksocial.authenticator.ServiceInterceptor; import com.baulsupp.oksocial.commands.CommandRegistry; import com.baulsupp.oksocial.commands.MainAware; import com.baulsupp.oksocial.commands.OksocialCommand; import com.baulsupp.oksocial.commands.ShellCommand; import com.baulsupp.oksocial.completion.ArgumentCompleter; import com.baulsupp.oksocial.completion.CompletionVariableCache; import com.baulsupp.oksocial.completion.TmpCompletionVariableCache; import com.baulsupp.oksocial.completion.UrlCompleter; import com.baulsupp.oksocial.completion.UrlList; import com.baulsupp.oksocial.credentials.CredentialsStore; import com.baulsupp.oksocial.credentials.FixedTokenCredentialsStore; import com.baulsupp.oksocial.credentials.OSXCredentialsStore; import com.baulsupp.oksocial.credentials.PreferencesCredentialsStore; import com.baulsupp.oksocial.jjs.JavascriptApiCommand; import com.baulsupp.oksocial.location.BestLocation; import com.baulsupp.oksocial.location.LocationSource; import com.baulsupp.oksocial.network.DnsMode; import com.baulsupp.oksocial.network.DnsOverride; import com.baulsupp.oksocial.network.DnsSelector; import com.baulsupp.oksocial.network.IPvMode; import com.baulsupp.oksocial.network.InterfaceSocketFactory; import com.baulsupp.oksocial.network.NettyDns; import com.baulsupp.oksocial.okhttp.OkHttpResponseFuture; import com.baulsupp.oksocial.output.ConsoleHandler; import com.baulsupp.oksocial.output.DownloadHandler; import com.baulsupp.oksocial.output.OutputHandler; import com.baulsupp.oksocial.security.CertificatePin; import com.baulsupp.oksocial.security.CertificateUtils; import com.baulsupp.oksocial.security.ConsoleCallbackHandler; import 
com.baulsupp.oksocial.security.InsecureHostnameVerifier; import com.baulsupp.oksocial.security.InsecureTrustManager; import com.baulsupp.oksocial.security.OpenSCUtil; import com.baulsupp.oksocial.services.twitter.TwitterCachingInterceptor; import com.baulsupp.oksocial.services.twitter.TwitterDeflatedResponseInterceptor; import com.baulsupp.oksocial.util.FileContent; import com.baulsupp.oksocial.util.InetAddressParam; import com.baulsupp.oksocial.util.LoggingUtil; import com.baulsupp.oksocial.util.ProtocolUtil; import com.baulsupp.oksocial.util.UsageException; import com.baulsupp.oksocial.util.Util; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.mcdermottroe.apple.OSXKeychainException; import com.moczul.ok2curl.CurlInterceptor; import io.airlift.airline.Arguments; import io.airlift.airline.Command; import io.airlift.airline.HelpOption; import io.airlift.airline.Option; import io.airlift.airline.SingleCommand; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.util.concurrent.DefaultThreadFactory; import java.io.File; import java.io.IOException; import java.net.Proxy; import java.net.SocketException; import java.security.KeyStore; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; import javax.net.SocketFactory; import javax.net.ssl.KeyManager; import javax.net.ssl.X509TrustManager; import okhttp3.Cache; import okhttp3.Call; import okhttp3.Dns; import okhttp3.HttpUrl; import okhttp3.MediaType; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; import okhttp3.logging.HttpLoggingInterceptor; import static 
com.baulsupp.oksocial.security.CertificateUtils.trustManagerForKeyStore; import static com.baulsupp.oksocial.security.KeystoreUtils.createKeyManager; import static com.baulsupp.oksocial.security.KeystoreUtils.createSslSocketFactory; import static com.baulsupp.oksocial.security.KeystoreUtils.getKeyStore; import static com.baulsupp.oksocial.security.KeystoreUtils.keyManagerArray; import static java.util.Arrays.asList; import static java.util.Optional.empty; import static java.util.Optional.ofNullable; import static java.util.concurrent.TimeUnit.SECONDS; import static java.util.stream.Collectors.joining; @SuppressWarnings({"WeakerAccess", "CanBeFinal", "unused"}) @Command(name = Main.NAME, description = "A curl for social apis.") public class Main extends HelpOption implements Runnable { private static Logger logger = Logger.getLogger(Main.class.getName()); static final String NAME = "oksocial"; private static Main fromArgs(String... args) { return SingleCommand.singleCommand(Main.class).parse(args); } public static void main(String... 
args) { fromArgs(args).run(); } @Option(name = {"-X", "--request"}, description = "Specify request command to use") public String method; @Option(name = {"-d", "--data"}, description = "HTTP POST data") public String data; @Option(name = {"-H", "--header"}, description = "Custom header to pass to server") public List<String> headers; @Option(name = {"-A", "--user-agent"}, description = "User-Agent to send to server") public String userAgent = NAME + "/" + versionString(); @Option(name = "--connect-timeout", description = "Maximum time allowed for connection (seconds)") public Integer connectTimeout; @Option(name = "--read-timeout", description = "Maximum time allowed for reading data (seconds)") public Integer readTimeout; @Option(name = {"-L", "--location"}, description = "Follow redirects") public boolean followRedirects = false; @Option(name = {"-k", "--insecure"}, description = "Allow connections to SSL sites without certs") public boolean allowInsecure = false; @Option(name = {"-i", "--include"}, description = "Include protocol headers in the output") public boolean showHeaders = false; @Option(name = "--frames", description = "Log HTTP/2 frames to STDERR") public boolean showHttp2Frames = false; @Option(name = "--debug", description = "Debug") public boolean debug = false; @Option(name = {"-e", "--referer"}, description = "Referer URL") public String referer; @Option(name = {"-V", "--version"}, description = "Show version number and quit") public boolean version = false; @Option(name = {"--cache"}, description = "Cache directory") public File cacheDirectory = null; @Option(name = {"--protocols"}, description = "Protocols") public String protocols; @Option(name = {"-o", "--output"}, description = "Output file/directory") public File outputDirectory; @Option(name = {"--authorize"}, description = "Authorize API") public boolean authorize; @Option(name = {"--renew"}, description = "Renew API Authorization") public boolean renew; @Option(name = {"--token"}, 
description = "Use existing Token for authorization") public String token; @Option(name = {"--curl"}, description = "Show curl commands") public boolean curl = false; @Option(name = {"--ip"}, description = "IP Preferences (system, ipv4, ipv6, ipv4only, ipv6only)", allowedValues = {"system", "ipv4", "ipv6", "ipv4only", "ipv6only"}) public IPvMode ipMode = IPvMode.SYSTEM; @Option(name = {"--dns"}, description = "DNS (netty, java)", allowedValues = {"java", "netty"}) public DnsMode dnsMode = DnsMode.NETTY; @Option(name = {"--dnsServers"}, description = "Specific DNS Servers (csv, google)") public String dnsServers = null; @Option(name = {"--resolve"}, description = "DNS Overrides (HOST:TARGET)") public List<String> resolve = null; @Option(name = {"--certificatePin"}, description = "Specific Local Network Interface") public List<CertificatePin> certificatePins = null; @Option(name = {"--networkInterface"}, description = "Specific Local Network Interface") public String networkInterface = null; @Option(name = {"--clientauth"}, description = "Use Client Authentication (from keystore)") public boolean clientAuth = false; @Option(name = {"--keystore"}, description = "Keystore") public File keystoreFile = null; @Option(name = {"--cert"}, description = "Use given server cert (Root CA)") public List<File> serverCerts = Lists.newArrayList(); @Option(name = {"--opensc"}, description = "Send OpenSC Client Certificate (slot)") public Integer opensc; @Option(name = {"--socks"}, description = "Use SOCKS proxy") public InetAddressParam socksProxy; @Option(name = {"--proxy"}, description = "Use HTTP proxy") public InetAddressParam proxy; @Option(name = {"--show-credentials"}, description = "Show Credentials") public boolean showCredentials = false; @Option(name = {"--alias-names"}, description = "Show Alias Names") public boolean aliasNames = false; @Option(name = {"-r", "--raw"}, description = "Raw Output") public boolean rawOutput = false; @Option(name = {"-s", "--set"}, 
description = "Token Set e.g. work") public String tokenSet = null; @Option(name = {"--serviceNames"}, description = "Service Names") public boolean serviceNames = false; @Option(name = {"--urlCompletion"}, description = "URL Completion") public boolean urlComplete; @Option(name = {"--apidoc"}, description = "API Documentation") public boolean apiDoc; @Option(name = {"--ssldebug"}, description = "SSL Debug") public boolean sslDebug; public String commandName = System.getProperty("command.name", "oksocial"); public String completionFile = System.getenv("COMPLETION_FILE"); @Arguments(title = "arguments", description = "Remote resource URLs") public List<String> arguments = new ArrayList<>(); public ServiceInterceptor serviceInterceptor = null; private Authorisation authorisation; public OkHttpClient client = null; public Request.Builder requestBuilder; public CommandRegistry commandRegistry = new CommandRegistry(); public OutputHandler outputHandler = null; public CredentialsStore credentialsStore = null; public CompletionVariableCache completionVariableCache; public LocationSource locationSource = new BestLocation(); private NioEventLoopGroup eventLoopGroup; private String versionString() { return Util.versionString("/oksocial-version.properties"); } @Override public void run() { if (sslDebug) { System.setProperty("javax.net.debug", "ssl,handshake"); } LoggingUtil.configureLogging(debug, showHttp2Frames); if (outputHandler == null) { outputHandler = buildHandler(); } if (showHelpIfRequested()) { return; } try { if (version) { outputHandler.info(NAME + " " + versionString()); return; } initialise(); if (showCredentials) { new PrintCredentials(client, credentialsStore, outputHandler, serviceInterceptor).showCredentials(arguments, this::createRequestBuilder); return; } if (aliasNames) { printAliasNames(); return; } if (serviceNames) { outputHandler.info(serviceInterceptor.names().stream().collect(joining(" "))); return; } if (urlComplete) { 
outputHandler.info(urlCompletionList()); return; } if (apiDoc) { showApiDocs(); return; } if (authorize) { authorize(); return; } if (renew) { renew(); return; } executeRequests(outputHandler); } catch (Exception e) { outputHandler.showError("unknown error", e); } finally { closeClients(); } } private void showApiDocs() throws Exception { ServiceApiDocPresenter docs = new ServiceApiDocPresenter(serviceInterceptor, client, credentialsStore); getFullCompletionUrl().ifPresent(u -> { try { docs.explainApi(u, outputHandler, client); } catch (IOException e) { throw Throwables.propagate(e); } }); } // TODO refactor this mess out of Main private String urlCompletionList() throws Exception { ShellCommand command = getShellCommand(); Optional<ArgumentCompleter> commandCompletor = command.completer(); if (commandCompletor.isPresent()) { UrlList urls = commandCompletion(commandCompletor.get(), arguments); String prefix = arguments.get(arguments.size() - 1); if (completionFile != null) { urls.toFile(new File(completionFile), 0, prefix); } return urls.getUrls(prefix).stream().collect(joining("\n")); } ArgumentCompleter completer = new UrlCompleter(serviceInterceptor.services(), client, credentialsStore, completionVariableCache); Optional<String> fullCompletionUrlOpt = getFullCompletionUrl(); // reload hack (in case changed for "" case) String originalCompletionUrl = arguments.get(arguments.size() - 1); if (fullCompletionUrlOpt.isPresent()) { String fullCompletionUrl = fullCompletionUrlOpt.get(); UrlList urls = completer.urlList(fullCompletionUrl); final int strip; if (!fullCompletionUrl.equals(originalCompletionUrl)) { strip = fullCompletionUrl.length() - originalCompletionUrl.length(); } else { strip = 0; } if (completionFile != null) { urls.toFile(new File(completionFile), strip, originalCompletionUrl); } return urls.getUrls(fullCompletionUrl).stream() .map(u -> u.substring(strip)) .collect(joining("\n")); } else { return ""; } } private UrlList 
commandCompletion(ArgumentCompleter urlCompleter, List<String> arguments) throws IOException { return urlCompleter.urlList(arguments.get(arguments.size() - 1)); } /* * The last url in arguments which should be used for completion or apidoc requests. * In the case of javascript command expansion, it is expanded first before * being returned. * * n.b. arguments may be modified by this call. */ private Optional<String> getFullCompletionUrl() throws Exception { if (arguments.isEmpty()) { return empty(); } String urlToComplete = arguments.get(arguments.size() - 1); ShellCommand command = getShellCommand(); if (command instanceof JavascriptApiCommand) { List<Request> requests = command.buildRequests(client, requestBuilder, arguments); if (requests.size() > 0) { HttpUrl newUrl = requests.get(0).url(); // support "" -> http://api.test.com if (urlToComplete.isEmpty() && newUrl.encodedPath().equals("/")) { urlToComplete = "/"; arguments.remove(arguments.size() - 1); arguments.add(urlToComplete); } String newUrlCompletion = newUrl.toString(); if (newUrlCompletion.endsWith(urlToComplete)) { return Optional.of(newUrlCompletion); } } } else if (UrlCompleter.isPossibleAddress(urlToComplete)) { return Optional.of(urlToComplete); } return empty(); } public void initialise() throws Exception { if (outputHandler == null) { outputHandler = buildHandler(); } if (credentialsStore == null) { credentialsStore = createCredentialsStore(); } OkHttpClient.Builder clientBuilder = createClientBuilder(); OkHttpClient authClient = clientBuilder.build(); serviceInterceptor = new ServiceInterceptor(authClient, credentialsStore); authorisation = new Authorisation(serviceInterceptor, credentialsStore, authClient, outputHandler); clientBuilder.networkInterceptors().add(0, serviceInterceptor); client = clientBuilder.build(); requestBuilder = createRequestBuilder(); if (completionVariableCache == null) { completionVariableCache = new TmpCompletionVariableCache(); } } public OkHttpClient getClient() { 
return client; } private CredentialsStore createCredentialsStore() throws OSXKeychainException { if (token != null && !authorize) { return new FixedTokenCredentialsStore(token); } if (Util.isOSX()) { return new OSXCredentialsStore(ofNullable(tokenSet)); } else { return new PreferencesCredentialsStore(ofNullable(tokenSet)); } } private void closeClients() { if (client != null) { client.dispatcher().executorService().shutdown(); client.connectionPool().evictAll(); } if (eventLoopGroup != null) { eventLoopGroup.shutdownGracefully(0, 0, TimeUnit.SECONDS); } } private OutputHandler buildHandler() { if (outputDirectory != null) { return new DownloadHandler(outputDirectory); } else if (rawOutput) { return new DownloadHandler(new File("-")); } else { return ConsoleHandler.instance(); } } private void executeRequests(OutputHandler outputHandler) throws Exception { ShellCommand command = getShellCommand(); List<Request> requests = command.buildRequests(client, requestBuilder, arguments); if (!command.handlesRequests()) { if (requests.isEmpty()) { throw new UsageException("no urls specified"); } List<Future<Response>> responseFutures = enqueueRequests(requests, client); processResponses(outputHandler, responseFutures); } } private void processResponses(OutputHandler outputHandler, List<Future<Response>> responseFutures) throws IOException, InterruptedException { boolean failed = false; for (Future<Response> responseFuture : responseFutures) { if (failed) { responseFuture.cancel(true); } else { try (Response response = responseFuture.get()) { outputHandler.showOutput(response, showHeaders); } catch (ExecutionException ee) { outputHandler.showError("request failed", ee.getCause()); failed = true; } } } } private List<Future<Response>> enqueueRequests(List<Request> requests, OkHttpClient client) { List<Future<Response>> responseFutures = Lists.newArrayList(); for (Request request : requests) { logger.log(Level.FINE, "url " + request.url()); if (requests.size() > 1 && !debug) { 
System.err.println(request.url()); } responseFutures.add(makeRequest(client, request)); } return responseFutures; } private ShellCommand getShellCommand() { ShellCommand shellCommand = commandRegistry.getCommandByName(commandName).orElse(new OksocialCommand()); if (shellCommand instanceof MainAware) { ((MainAware) shellCommand).setMain(this); } return shellCommand; } private void printAliasNames() { Set<String> names = Sets.newTreeSet(commandRegistry.names()); names.forEach(outputHandler::info); } private Future<Response> makeRequest(OkHttpClient client, Request request) { logger.log(Level.FINE, "Request " + request); Call call = client.newCall(request); OkHttpResponseFuture result = new OkHttpResponseFuture(); call.enqueue(result); return result.future; } private void authorize() throws Exception { Optional<AuthInterceptor<?>> auth = findAuthInterceptor(); authorisation.authorize(auth, ofNullable(token), arguments); } private void renew() throws Exception { Optional auth = findAuthInterceptor(); authorisation.renew(auth); } private Optional<AuthInterceptor<?>> findAuthInterceptor() throws Exception { ShellCommand command = getShellCommand(); Optional<AuthInterceptor<?>> auth = command.authenticator().flatMap((authName) -> serviceInterceptor.getByName(authName)); if (!auth.isPresent() && !arguments.isEmpty()) { String name = arguments.remove(0); auth = serviceInterceptor.findAuthInterceptor(name); } return auth; } public OkHttpClient.Builder createClientBuilder() throws Exception { OkHttpClient.Builder builder = new OkHttpClient.Builder(); builder.followSslRedirects(followRedirects); if (connectTimeout != null) { builder.connectTimeout(connectTimeout, SECONDS); } if (readTimeout != null) { builder.readTimeout(readTimeout, SECONDS); } builder.dns(buildDns()); if (networkInterface != null) { builder.socketFactory(getSocketFactory()); } configureTls(builder); if (cacheDirectory != null) { builder.cache(new Cache(cacheDirectory, 64 * 1024 * 1024)); } // TODO move 
behind AuthInterceptor API builder.addNetworkInterceptor(new TwitterCachingInterceptor()); builder.addNetworkInterceptor(new TwitterDeflatedResponseInterceptor()); if (curl) { builder.addNetworkInterceptor(new CurlInterceptor(System.err::println)); } if (debug) { builder.networkInterceptors().add(new HttpLoggingInterceptor(logger::info)); } if (socksProxy != null) { builder.proxy(new Proxy(Proxy.Type.SOCKS, socksProxy.address)); } else if (proxy != null) { builder.proxy(new Proxy(Proxy.Type.HTTP, proxy.address)); } if (protocols != null) { builder.protocols(ProtocolUtil.parseProtocolList(protocols)); } return builder; } private Dns buildDns() { Dns dns; if (dnsMode == DnsMode.NETTY) { dns = NettyDns.byName(ipMode, getEventLoopGroup(), dnsServers); } else { if (dnsServers != null) { throw new UsageException("unable to set dns servers with java DNS"); } dns = new DnsSelector(ipMode); } if (resolve != null) { dns = DnsOverride.build(dns, resolve); } return dns; } private NioEventLoopGroup getEventLoopGroup() { if (eventLoopGroup == null) { ThreadFactory threadFactory = new DefaultThreadFactory("netty", true); eventLoopGroup = new NioEventLoopGroup(1, threadFactory); } return eventLoopGroup; } private SocketFactory getSocketFactory() throws SocketException { Optional<SocketFactory> socketFactory = InterfaceSocketFactory.byName(networkInterface); if (!socketFactory.isPresent()) { throw new UsageException("networkInterface '" + networkInterface + "' not found"); } return socketFactory.get(); } private void configureTls(OkHttpClient.Builder builder) throws Exception { ConsoleCallbackHandler callbackHandler = new ConsoleCallbackHandler(); // possibly null KeyStore keystore = null; if (keystoreFile != null) { keystore = getKeyStore(keystoreFile); } List<KeyManager> keyManagers = Lists.newArrayList(); if (opensc != null) { keyManagers.addAll(asList(OpenSCUtil.getKeyManagers(callbackHandler, opensc))); } else if (clientAuth) { if (keystore == null) { throw new 
UsageException("--clientauth specified without --keystore"); } keyManagers.add(createKeyManager(keystore, callbackHandler)); } X509TrustManager trustManager; if (allowInsecure) { trustManager = new InsecureTrustManager(); builder.hostnameVerifier(new InsecureHostnameVerifier()); } else { List<X509TrustManager> trustManagers = Lists.newArrayList(); if (keystore != null) { trustManagers.add(trustManagerForKeyStore(keystore)); } if (!serverCerts.isEmpty()) { trustManagers.add(CertificateUtils.load(serverCerts)); } trustManager = CertificateUtils.combineTrustManagers(trustManagers); } builder.sslSocketFactory(createSslSocketFactory(keyManagerArray(keyManagers), trustManager), trustManager); if (certificatePins != null) { builder.certificatePinner(CertificatePin.buildFromCommandLine(certificatePins)); } } private String getRequestMethod() { if (method != null) { return method; } if (data != null) { return "POST"; } return "GET"; } private RequestBody getRequestBody() throws IOException { if (data == null) { return null; } String mimeType = "application/x-www-form-urlencoded"; if (headers != null) { for (String header : headers) { String[] parts = header.split(":", -1); if ("Content-Type".equalsIgnoreCase(parts[0])) { mimeType = parts[1].trim(); headers.remove(header); break; } } } return RequestBody.create(MediaType.parse(mimeType), FileContent.readParamBytes(data)); } public Request.Builder createRequestBuilder() throws IOException { Request.Builder requestBuilder = new Request.Builder(); requestBuilder.method(getRequestMethod(), getRequestBody()); if (headers != null) { for (String header : headers) { String[] parts = header.split(":", 2); requestBuilder.header(parts[0], parts[1]); } } if (referer != null) { requestBuilder.header("Referer", referer); } requestBuilder.header("User-Agent", userAgent); return requestBuilder; } }
no-follow (#237)
src/main/java/com/baulsupp/oksocial/Main.java
no-follow (#237)
Java
bsd-3-clause
cfc0839e7896a4212e6b4fdf14d3f111e7248528
0
NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,NCIP/caaers,NCIP/caaers
package gov.nih.nci.cabig.caaers.web.study; import gov.nih.nci.cabig.caaers.dao.query.ajax.StudySearchableAjaxableDomainObjectQuery; import gov.nih.nci.cabig.caaers.dao.ResearchStaffDao; import gov.nih.nci.cabig.caaers.dao.InvestigatorDao; import gov.nih.nci.cabig.caaers.domain.ajax.StudySearchableAjaxableDomainObject; import gov.nih.nci.cabig.caaers.domain.ajax.StudySiteAjaxableDomainObject; import gov.nih.nci.cabig.caaers.domain.repository.ajax.StudySearchableAjaxableDomainObjectRepository; import gov.nih.nci.cabig.caaers.domain.StudyParticipantAssignment; import gov.nih.nci.cabig.caaers.domain.Organization; import gov.nih.nci.cabig.caaers.domain.SiteResearchStaff; import gov.nih.nci.cabig.caaers.tools.configuration.Configuration; import gov.nih.nci.cabig.caaers.web.AbstractAjaxFacade; import gov.nih.nci.cabig.caaers.web.participant.AssignParticipantController; import gov.nih.nci.cabig.caaers.web.participant.AssignParticipantStudyCommand; import gov.nih.nci.cabig.caaers.CaaersSystemException; import java.util.*; import javax.servlet.http.HttpServletRequest; import org.extremecomponents.table.bean.Column; import org.extremecomponents.table.bean.Row; import org.extremecomponents.table.bean.Table; import org.extremecomponents.table.context.Context; import org.extremecomponents.table.context.HttpServletRequestContext; import org.extremecomponents.table.core.TableModel; import org.extremecomponents.table.core.TableModelImpl; import org.springframework.beans.factory.annotation.Required; import org.directwebremoting.WebContext; import org.directwebremoting.WebContextFactory; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.Log; public class SearchStudyAjaxFacade { private Class<?>[] CONTROLLERS = {AssignParticipantController.class}; private StudySearchableAjaxableDomainObjectRepository studySearchableAjaxableDomainObjectRepository; private static final Log log = LogFactory.getLog(SearchStudyAjaxFacade.class); public Object build(TableModel 
model, Collection studySearchableAjaxableDomainObjects) throws Exception { addTable(model, studySearchableAjaxableDomainObjects); addPrimaryIdColumn(model); addShorTitleColumn(model); addSponsorColumn(model); addPhaseCodeColumn(model); addStatusColumn(model); return model.assemble(); } private void addStatusColumn(TableModel model) { Column columnStatusCode = model.getColumnInstance(); columnStatusCode.setProperty("status"); model.addColumn(columnStatusCode); columnStatusCode.setSortable(Boolean.TRUE); } private void addPhaseCodeColumn(TableModel model) { Column columnPhaseCode = model.getColumnInstance(); columnPhaseCode.setTitle("Phase"); columnPhaseCode.setProperty("phaseCode"); model.addColumn(columnPhaseCode); columnPhaseCode.setSortable(Boolean.TRUE); } private void addSponsorColumn(TableModel model) { Column columnSponsorCode = model.getColumnInstance(); columnSponsorCode.setTitle("Funding Sponsor"); columnSponsorCode.setProperty("primarySponsorCode"); columnSponsorCode.setSortable(Boolean.TRUE); model.addColumn(columnSponsorCode); } private void addShorTitleColumn(TableModel model) { Column columnShortTitle = model.getColumnInstance(); columnShortTitle.setTitle("Short Title"); columnShortTitle.setProperty("shortTitle"); columnShortTitle.setSortable(Boolean.TRUE); model.addColumn(columnShortTitle); } private void addPrimaryIdColumn(TableModel model) { Column columnPrimaryIdentifier = model.getColumnInstance(); columnPrimaryIdentifier.setProperty("primaryIdentifierValue"); columnPrimaryIdentifier.setTitle("Study ID"); columnPrimaryIdentifier.setCell("gov.nih.nci.cabig.caaers.web.study.StudyLinkDisplayCell"); model.addColumn(columnPrimaryIdentifier); } private void addTable(TableModel model, Collection studySearchableAjaxableDomainObjects) { Table table = model.getTableInstance(); table.setTableId("ajaxTable"); table.setForm("assembler"); table.setItems(studySearchableAjaxableDomainObjects); table.setAction(model.getContext().getContextPath() + 
"/assembler.run"); table.setTitle(""); table.setShowPagination(Configuration.LAST_LOADED_CONFIGURATION.isAuthenticationModeLocal()); table.setOnInvokeAction("buildTable('assembler')"); table.setImagePath(model.getContext().getContextPath() + "/images/table/*.gif"); //only support filtering & sorting in local authentication mode. table.setFilterable(Configuration.LAST_LOADED_CONFIGURATION.isAuthenticationModeLocal()); table.setSortable(Configuration.LAST_LOADED_CONFIGURATION.isAuthenticationModeLocal()); if(Configuration.LAST_LOADED_CONFIGURATION.isAuthenticationModeLocal()){ table.setRowsDisplayed(100); } table.setSortRowsCallback("gov.nih.nci.cabig.caaers.web.table.SortRowsCallbackImpl"); table.setAutoIncludeParameters(false); model.addTable(table); Row row = model.getRowInstance(); row.setHighlightRow(Boolean.TRUE); model.addRow(row); } public String getTable(Map parameterMap, String type, String text, HttpServletRequest request) { List<StudySearchableAjaxableDomainObject> studySearchableAjaxableDomainObjects = getObjects(type, text); try { Context context = null; if (parameterMap == null) { context = new HttpServletRequestContext(request); } else { context = new HttpServletRequestContext(request, parameterMap); } TableModel model = new TableModelImpl(context); return build(model, studySearchableAjaxableDomainObjects).toString(); } catch (Exception e) { e.printStackTrace(); } return ""; } public String getTableForAssignParticipant(Map parameterMap, String type, String text, HttpServletRequest request) { int organizationID; try { organizationID = Integer.parseInt((String) parameterMap.get("organizationID")); } catch (Exception e) { organizationID = 0; } List<StudySearchableAjaxableDomainObject> studySearchableAjaxableDomainObjects = getObjects(type, text, organizationID, true); // filter objects Object command = extractCommand(); if (command instanceof AssignParticipantStudyCommand) { AssignParticipantStudyCommand c = (AssignParticipantStudyCommand)command; if 
(c.getLoggedinResearchStaff() != null) { List<StudySearchableAjaxableDomainObject> _s = new ArrayList<StudySearchableAjaxableDomainObject>(); boolean isTheSameSite = c.getLoggedInOrganizations().contains(c.getOrganization()); Set<String> orgCodes = new HashSet<String>(); for (Organization o : c.getLoggedInOrganizations()) { orgCodes.add(o.getNciInstituteCode()); } for (StudySearchableAjaxableDomainObject s : studySearchableAjaxableDomainObjects) { boolean isGood = false; if (isTheSameSite) { // if the Participant's Site is the same as Loggedin user, show all studies where this site is just a StudySite for (StudySiteAjaxableDomainObject ss : s.getStudySites()) { if (ss.getNciInstituteCode().equals(c.getOrganization().getNciInstituteCode())) { isGood = true; } } } else { // if the Participant's Site is other than Loggedin user, show all studies where this site is just a StudySite if (orgCodes.contains(s.getCoordinatingCenterCode()) || orgCodes.contains(s.getPrimarySponsorCode())) { isGood = true; } } if (isGood) _s.add(s); } studySearchableAjaxableDomainObjects = _s; } else { } } // try { Context context = null; if (parameterMap == null) { context = new HttpServletRequestContext(request); } else { context = new HttpServletRequestContext(request, parameterMap); } TableModel model = new TableModelImpl(context); addTable(model, studySearchableAjaxableDomainObjects); Column columnPrimaryIdentifier = model.getColumnInstance(); columnPrimaryIdentifier.setProperty("primaryIdentifierValue"); columnPrimaryIdentifier.setSortable(true); columnPrimaryIdentifier.setTitle("Study ID"); model.addColumn(columnPrimaryIdentifier); Column columnShortTitle = model.getColumnInstance(); columnShortTitle.setProperty("shortTitle"); columnShortTitle.setSortable(Boolean.TRUE); model.addColumn(columnShortTitle); addSponsorColumn(model); addPhaseCodeColumn(model); addStatusColumn(model); Column columnStudySite = model.getColumnInstance(); columnStudySite.setProperty("shortTitle"); 
columnStudySite.setSortable(Boolean.TRUE); columnStudySite.setTitle("Study Sites"); columnStudySite.setCell("gov.nih.nci.cabig.caaers.web.search.cell.SelectedStudySiteCell"); model.addColumn(columnStudySite); return model.assemble().toString(); } catch (Exception e) { e.printStackTrace(); } return ""; } private List<StudySearchableAjaxableDomainObject> getObjects(String type, String text) { return getObjects(type, text, 0, false); } public List<StudySearchableAjaxableDomainObject> getObjects(String type, String text, int organizationID, boolean hideIncomplete) { StudySearchableAjaxableDomainObjectQuery studySearchableAjaxableDomainObjectQuery = new StudySearchableAjaxableDomainObjectQuery(); if (organizationID > 0) studySearchableAjaxableDomainObjectQuery.filterStudiesByStudySiteBySiteId(organizationID); studySearchableAjaxableDomainObjectQuery.filterByDataEntryStatus(hideIncomplete); StringTokenizer typeToken = new StringTokenizer(type, ","); StringTokenizer textToken = new StringTokenizer(text, ","); String sType; String sText; while (typeToken.hasMoreTokens() && textToken.hasMoreTokens()) { sType = typeToken.nextToken(); sText = textToken.nextToken(); if ("st".equals(sType)) { studySearchableAjaxableDomainObjectQuery.filterStudiesWithMatchingShortTitleOnly(sText); } else if ("idtf".equals(sType)) { studySearchableAjaxableDomainObjectQuery.filterStudiesWithMatchingIdentifierOnly(sText); } } List<StudySearchableAjaxableDomainObject> studySearchableAjaxableDomainObjects = studySearchableAjaxableDomainObjectRepository.findStudies(studySearchableAjaxableDomainObjectQuery,type, text); return studySearchableAjaxableDomainObjects; } @Required public void setStudySearchableAjaxableDomainObjectRepository(StudySearchableAjaxableDomainObjectRepository studySearchableAjaxableDomainObjectRepository) { this.studySearchableAjaxableDomainObjectRepository = studySearchableAjaxableDomainObjectRepository; } private Object extractCommand() { WebContext webContext = 
WebContextFactory.get(); Object command = null; for (Class<?> controllerClass : CONTROLLERS) { String formSessionAttributeName = controllerClass.getName() + ".FORM.command"; command = webContext.getSession().getAttribute(formSessionAttributeName); if (command == null) { log.debug("Command not found using name " + formSessionAttributeName); } else { log.debug("Command found using name " + formSessionAttributeName); break; } } if (command == null) { throw new CaaersSystemException("Could not find command in session"); } else { return command; } } public Class<?>[] getCONTROLLERS() { return CONTROLLERS; } public void setCONTROLLERS(Class<?>[] CONTROLLERS) { this.CONTROLLERS = CONTROLLERS; } }
caAERS/software/web/src/main/java/gov/nih/nci/cabig/caaers/web/study/SearchStudyAjaxFacade.java
package gov.nih.nci.cabig.caaers.web.study; import gov.nih.nci.cabig.caaers.dao.query.ajax.StudySearchableAjaxableDomainObjectQuery; import gov.nih.nci.cabig.caaers.dao.ResearchStaffDao; import gov.nih.nci.cabig.caaers.dao.InvestigatorDao; import gov.nih.nci.cabig.caaers.domain.ajax.StudySearchableAjaxableDomainObject; import gov.nih.nci.cabig.caaers.domain.ajax.StudySiteAjaxableDomainObject; import gov.nih.nci.cabig.caaers.domain.repository.ajax.StudySearchableAjaxableDomainObjectRepository; import gov.nih.nci.cabig.caaers.domain.StudyParticipantAssignment; import gov.nih.nci.cabig.caaers.domain.Organization; import gov.nih.nci.cabig.caaers.domain.SiteResearchStaff; import gov.nih.nci.cabig.caaers.web.AbstractAjaxFacade; import gov.nih.nci.cabig.caaers.web.participant.AssignParticipantController; import gov.nih.nci.cabig.caaers.web.participant.AssignParticipantStudyCommand; import gov.nih.nci.cabig.caaers.CaaersSystemException; import java.util.*; import javax.servlet.http.HttpServletRequest; import org.extremecomponents.table.bean.Column; import org.extremecomponents.table.bean.Row; import org.extremecomponents.table.bean.Table; import org.extremecomponents.table.context.Context; import org.extremecomponents.table.context.HttpServletRequestContext; import org.extremecomponents.table.core.TableModel; import org.extremecomponents.table.core.TableModelImpl; import org.springframework.beans.factory.annotation.Required; import org.directwebremoting.WebContext; import org.directwebremoting.WebContextFactory; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.Log; public class SearchStudyAjaxFacade { private Class<?>[] CONTROLLERS = {AssignParticipantController.class}; private StudySearchableAjaxableDomainObjectRepository studySearchableAjaxableDomainObjectRepository; private static final Log log = LogFactory.getLog(SearchStudyAjaxFacade.class); public Object build(TableModel model, Collection studySearchableAjaxableDomainObjects) throws 
Exception { addTable(model, studySearchableAjaxableDomainObjects); addPrimaryIdColumn(model); addShorTitleColumn(model); addSponsorColumn(model); addPhaseCodeColumn(model); addStatusColumn(model); return model.assemble(); } private void addStatusColumn(TableModel model) { Column columnStatusCode = model.getColumnInstance(); columnStatusCode.setProperty("status"); model.addColumn(columnStatusCode); columnStatusCode.setSortable(Boolean.TRUE); } private void addPhaseCodeColumn(TableModel model) { Column columnPhaseCode = model.getColumnInstance(); columnPhaseCode.setTitle("Phase"); columnPhaseCode.setProperty("phaseCode"); model.addColumn(columnPhaseCode); columnPhaseCode.setSortable(Boolean.TRUE); } private void addSponsorColumn(TableModel model) { Column columnSponsorCode = model.getColumnInstance(); columnSponsorCode.setTitle("Funding Sponsor"); columnSponsorCode.setProperty("primarySponsorCode"); columnSponsorCode.setSortable(Boolean.TRUE); model.addColumn(columnSponsorCode); } private void addShorTitleColumn(TableModel model) { Column columnShortTitle = model.getColumnInstance(); columnShortTitle.setTitle("Short Title"); columnShortTitle.setProperty("shortTitle"); columnShortTitle.setSortable(Boolean.TRUE); model.addColumn(columnShortTitle); } private void addPrimaryIdColumn(TableModel model) { Column columnPrimaryIdentifier = model.getColumnInstance(); columnPrimaryIdentifier.setProperty("primaryIdentifierValue"); columnPrimaryIdentifier.setTitle("Study ID"); columnPrimaryIdentifier.setCell("gov.nih.nci.cabig.caaers.web.study.StudyLinkDisplayCell"); model.addColumn(columnPrimaryIdentifier); } private void addTable(TableModel model, Collection studySearchableAjaxableDomainObjects) { Table table = model.getTableInstance(); table.setTableId("ajaxTable"); table.setForm("assembler"); table.setItems(studySearchableAjaxableDomainObjects); table.setAction(model.getContext().getContextPath() + "/assembler.run"); table.setTitle(""); table.setShowPagination(true); 
table.setOnInvokeAction("buildTable('assembler')"); table.setImagePath(model.getContext().getContextPath() + "/images/table/*.gif"); table.setFilterable(true); table.setSortable(false); table.setSortRowsCallback("gov.nih.nci.cabig.caaers.web.table.SortRowsCallbackImpl"); table.setAutoIncludeParameters(false); model.addTable(table); Row row = model.getRowInstance(); row.setHighlightRow(Boolean.TRUE); model.addRow(row); } public String getTable(Map parameterMap, String type, String text, HttpServletRequest request) { List<StudySearchableAjaxableDomainObject> studySearchableAjaxableDomainObjects = getObjects(type, text); try { Context context = null; if (parameterMap == null) { context = new HttpServletRequestContext(request); } else { context = new HttpServletRequestContext(request, parameterMap); } TableModel model = new TableModelImpl(context); return build(model, studySearchableAjaxableDomainObjects).toString(); } catch (Exception e) { e.printStackTrace(); } return ""; } public String getTableForAssignParticipant(Map parameterMap, String type, String text, HttpServletRequest request) { int organizationID; try { organizationID = Integer.parseInt((String) parameterMap.get("organizationID")); } catch (Exception e) { organizationID = 0; } List<StudySearchableAjaxableDomainObject> studySearchableAjaxableDomainObjects = getObjects(type, text, organizationID, true); // filter objects Object command = extractCommand(); if (command instanceof AssignParticipantStudyCommand) { AssignParticipantStudyCommand c = (AssignParticipantStudyCommand)command; if (c.getLoggedinResearchStaff() != null) { List<StudySearchableAjaxableDomainObject> _s = new ArrayList<StudySearchableAjaxableDomainObject>(); boolean isTheSameSite = c.getLoggedInOrganizations().contains(c.getOrganization()); Set<String> orgCodes = new HashSet<String>(); for (Organization o : c.getLoggedInOrganizations()) { orgCodes.add(o.getNciInstituteCode()); } for (StudySearchableAjaxableDomainObject s : 
studySearchableAjaxableDomainObjects) { boolean isGood = false; if (isTheSameSite) { // if the Participant's Site is the same as Loggedin user, show all studies where this site is just a StudySite for (StudySiteAjaxableDomainObject ss : s.getStudySites()) { if (ss.getNciInstituteCode().equals(c.getOrganization().getNciInstituteCode())) { isGood = true; } } } else { // if the Participant's Site is other than Loggedin user, show all studies where this site is just a StudySite if (orgCodes.contains(s.getCoordinatingCenterCode()) || orgCodes.contains(s.getPrimarySponsorCode())) { isGood = true; } } if (isGood) _s.add(s); } studySearchableAjaxableDomainObjects = _s; } else { } } // try { Context context = null; if (parameterMap == null) { context = new HttpServletRequestContext(request); } else { context = new HttpServletRequestContext(request, parameterMap); } TableModel model = new TableModelImpl(context); addTable(model, studySearchableAjaxableDomainObjects); Column columnPrimaryIdentifier = model.getColumnInstance(); columnPrimaryIdentifier.setProperty("primaryIdentifierValue"); columnPrimaryIdentifier.setSortable(true); columnPrimaryIdentifier.setTitle("Study ID"); model.addColumn(columnPrimaryIdentifier); Column columnShortTitle = model.getColumnInstance(); columnShortTitle.setProperty("shortTitle"); columnShortTitle.setSortable(Boolean.TRUE); model.addColumn(columnShortTitle); addSponsorColumn(model); addPhaseCodeColumn(model); addStatusColumn(model); Column columnStudySite = model.getColumnInstance(); columnStudySite.setProperty("shortTitle"); columnStudySite.setSortable(Boolean.TRUE); columnStudySite.setTitle("Study Sites"); columnStudySite.setCell("gov.nih.nci.cabig.caaers.web.search.cell.SelectedStudySiteCell"); model.addColumn(columnStudySite); return model.assemble().toString(); } catch (Exception e) { e.printStackTrace(); } return ""; } private List<StudySearchableAjaxableDomainObject> getObjects(String type, String text) { return getObjects(type, text, 0, 
false); } public List<StudySearchableAjaxableDomainObject> getObjects(String type, String text, int organizationID, boolean hideIncomplete) { StudySearchableAjaxableDomainObjectQuery studySearchableAjaxableDomainObjectQuery = new StudySearchableAjaxableDomainObjectQuery(); if (organizationID > 0) studySearchableAjaxableDomainObjectQuery.filterStudiesByStudySiteBySiteId(organizationID); studySearchableAjaxableDomainObjectQuery.filterByDataEntryStatus(hideIncomplete); StringTokenizer typeToken = new StringTokenizer(type, ","); StringTokenizer textToken = new StringTokenizer(text, ","); String sType; String sText; while (typeToken.hasMoreTokens() && textToken.hasMoreTokens()) { sType = typeToken.nextToken(); sText = textToken.nextToken(); if ("st".equals(sType)) { studySearchableAjaxableDomainObjectQuery.filterStudiesWithMatchingShortTitleOnly(sText); } else if ("idtf".equals(sType)) { studySearchableAjaxableDomainObjectQuery.filterStudiesWithMatchingIdentifierOnly(sText); } } List<StudySearchableAjaxableDomainObject> studySearchableAjaxableDomainObjects = studySearchableAjaxableDomainObjectRepository.findStudies(studySearchableAjaxableDomainObjectQuery,type, text); return studySearchableAjaxableDomainObjects; } @Required public void setStudySearchableAjaxableDomainObjectRepository(StudySearchableAjaxableDomainObjectRepository studySearchableAjaxableDomainObjectRepository) { this.studySearchableAjaxableDomainObjectRepository = studySearchableAjaxableDomainObjectRepository; } private Object extractCommand() { WebContext webContext = WebContextFactory.get(); Object command = null; for (Class<?> controllerClass : CONTROLLERS) { String formSessionAttributeName = controllerClass.getName() + ".FORM.command"; command = webContext.getSession().getAttribute(formSessionAttributeName); if (command == null) { log.debug("Command not found using name " + formSessionAttributeName); } else { log.debug("Command found using name " + formSessionAttributeName); break; } } if (command == 
null) { throw new CaaersSystemException("Could not find command in session"); } else { return command; } } public Class<?>[] getCONTROLLERS() { return CONTROLLERS; } public void setCONTROLLERS(Class<?>[] CONTROLLERS) { this.CONTROLLERS = CONTROLLERS; } }
CAAERS-3338 SVN-Revision: 11553
caAERS/software/web/src/main/java/gov/nih/nci/cabig/caaers/web/study/SearchStudyAjaxFacade.java
CAAERS-3338
Java
mit
284b8b33945bb2f06b60282ba700c9577ad96fbd
0
mcasperson/IridiumApplicationTesting,mcasperson/IridiumApplicationTesting,mcasperson/IridiumApplicationTesting
package au.com.agic.apptesting.utils.impl; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import au.com.agic.apptesting.constants.Constants; import au.com.agic.apptesting.exception.ConfigurationException; import au.com.agic.apptesting.exception.DriverException; import au.com.agic.apptesting.profiles.configuration.UrlMapping; import au.com.agic.apptesting.utils.FeatureState; import au.com.agic.apptesting.utils.ProxyDetails; import au.com.agic.apptesting.utils.SystemPropertyUtils; import au.com.agic.apptesting.utils.ThreadWebDriverMap; import au.com.agic.apptesting.utils.WebDriverFactory; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.openqa.selenium.WebDriver; import org.openqa.selenium.remote.DesiredCapabilities; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.validation.constraints.NotNull; import javaslang.control.Try; /** * A service that generates local web driver instances to test on the local pc. 
Assumes that Chrome * is present and installed in the default location, and that the webdriver.chrome.driver system * property has been set, and is pointing to a version of the driver downloaded from * http://chromedriver.storage.googleapis.com/index.html */ public class LocalThreadWebDriverMapImpl implements ThreadWebDriverMap { private static final Logger LOGGER = LoggerFactory.getLogger(LocalThreadWebDriverMapImpl.class); private static final SystemPropertyUtils SYSTEM_PROPERTY_UTILS = new SystemPropertyUtilsImpl(); private static final WebDriverFactory WEB_DRIVER_FACTORY = new WebDriverFactoryImpl(); /** * The mapping between thread ids and the feature state objects that they use for the tests */ private final Map<String, FeatureState> threadIdToCapMap = new HashMap<>(); /** * The mapping between thread ids and the webdrivers that they use for the tests */ private final Map<String, WebDriver> threadIdToDriverMap = new HashMap<>(); /** * The index of the Url we are going to be testing */ private int currentUrl; /** * The index of the data set we are going to be testing */ private int currentDataset; /** * The list of URLs associated with the application we are testing */ private List<UrlMapping> originalApplicationUrls; /** * The directory that holds reports and other test script outputs */ private String reportDirectory; /** * The values that can be input into the app */ private Map<Integer, Map<String, String>> originalDataSets; /** * A list of temp folders to delete once the test is finished */ private List<File> tempFolders; /** * The port for the proxy */ private List<ProxyDetails<?>> proxies; @Override public void initialise( @NotNull final List<DesiredCapabilities> desiredCapabilities, @NotNull final List<UrlMapping> applicationUrls, @NotNull final Map<Integer, Map<String, String>> datasets, @NotNull final String myReportDirectory, @NotNull final List<File> myTempFolders, @NotNull final List<ProxyDetails<?>> myProxies) { checkNotNull(desiredCapabilities); 
checkNotNull(applicationUrls); checkNotNull(datasets); checkNotNull(myReportDirectory); checkNotNull(myTempFolders); checkNotNull(myProxies); originalApplicationUrls = new ArrayList<>(applicationUrls); originalDataSets = new HashMap<>(datasets); reportDirectory = myReportDirectory; tempFolders = new ArrayList<>(myTempFolders); proxies = new ArrayList<>(myProxies); } @NotNull @Override public synchronized FeatureState getDesiredCapabilitiesForThread(@NotNull final String name) { checkArgument(StringUtils.isNotBlank(name)); checkArgument(name.startsWith(Constants.THREAD_NAME_PREFIX)); if (threadIdToCapMap.containsKey(name)) { return threadIdToCapMap.get(name); } /* We have allocated our available configurations */ final int urlCount = Math.max(originalApplicationUrls.size(), 1); if (currentUrl >= urlCount) { throw new ConfigurationException("Configuration pool has been exhausted! " + currentUrl + " is greater than or equal to " + urlCount); } /* Get the details that the requesting thread will need */ final UrlMapping url = originalApplicationUrls.isEmpty() ? null : originalApplicationUrls.get(currentUrl); final Map<String, String> dataSet = originalDataSets.containsKey(currentDataset) ? 
new HashMap<>(originalDataSets.get(currentDataset)) : new HashMap<>(); /* Tick over to the next url when all the capabilities have been consumed */ ++currentDataset; if (currentDataset >= getMaxDataSets()) { currentDataset = 0; ++currentUrl; } final FeatureState featureState = new FeatureStateImpl( url, dataSet, reportDirectory, proxies); threadIdToCapMap.put(name, featureState); return featureState; } @NotNull @Override public synchronized WebDriver getWebDriverForThread(@NotNull final String name, final boolean createIfMissing) { checkArgument(StringUtils.isNotEmpty(name)); if (threadIdToDriverMap.containsKey(name)) { return threadIdToDriverMap.get(name); } if (createIfMissing) { LOGGER.info("WEBAPPTESTER-INFO-0006: Creating WebDriver"); final WebDriver webDriver = WEB_DRIVER_FACTORY.createWebDriver(proxies, tempFolders); threadIdToDriverMap.put(name, webDriver); return webDriver; } throw new DriverException("Could not find or create web driver"); } @Override public synchronized void clearWebDriverForThread(@NotNull final String name, final boolean quitDriver) { checkArgument(StringUtils.isNotEmpty(name)); if (threadIdToDriverMap.containsKey(name)) { if (quitDriver) { LOGGER.info("WEBAPPTESTER-INFO-0007: Quitting WebDriver"); threadIdToDriverMap.get(name).quit(); } threadIdToDriverMap.remove(name); } } @Override public synchronized int getNumberCapabilities() { return Math.max(originalApplicationUrls.size(), 1) * Math.max(getMaxDataSets(), 1); } @Override public List<File> getTempFolders() { return tempFolders; } private Integer getMaxDataSets() { try { final String maxDataSets = SYSTEM_PROPERTY_UTILS.getProperty(Constants.NUMBER_DATA_SETS_SYSTEM_PROPERTY); if (StringUtils.isNotBlank(maxDataSets)) { final Integer maxDataSetsNumber = Integer.parseInt( SYSTEM_PROPERTY_UTILS.getProperty(Constants.NUMBER_DATA_SETS_SYSTEM_PROPERTY)); return Math.min(originalDataSets.size(), maxDataSetsNumber); } } catch (final NumberFormatException ignored) { /* Input was not a 
number, so ignore it */ } return originalDataSets.size(); } @Override public synchronized void shutdown() { for (final WebDriver webdriver : threadIdToDriverMap.values()) { try { if (!WEB_DRIVER_FACTORY.leaveWindowsOpen()) { webdriver.quit(); } } catch (final Exception ignored) { // do nothing and continue closing the other webdrivers } } /* Clear the map */ threadIdToDriverMap.clear(); threadIdToCapMap.clear(); /* Attempt to delete all the temp folders */ getTempFolders().forEach(e -> Try.run(() -> FileUtils.deleteDirectory(e))); /* Reset the list of available configurations */ currentUrl = 0; } @Override public synchronized void shutdown(@NotNull final String name) { checkArgument(StringUtils.isNotBlank(name)); this.clearWebDriverForThread(name, !WEB_DRIVER_FACTORY.leaveWindowsOpen()); } }
src/main/java/au/com/agic/apptesting/utils/impl/LocalThreadWebDriverMapImpl.java
package au.com.agic.apptesting.utils.impl; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import au.com.agic.apptesting.constants.Constants; import au.com.agic.apptesting.exception.ConfigurationException; import au.com.agic.apptesting.exception.DriverException; import au.com.agic.apptesting.profiles.configuration.UrlMapping; import au.com.agic.apptesting.utils.FeatureState; import au.com.agic.apptesting.utils.ProxyDetails; import au.com.agic.apptesting.utils.SystemPropertyUtils; import au.com.agic.apptesting.utils.ThreadWebDriverMap; import au.com.agic.apptesting.utils.WebDriverFactory; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.openqa.selenium.WebDriver; import org.openqa.selenium.remote.DesiredCapabilities; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.validation.constraints.NotNull; import javaslang.control.Try; /** * A service that generates local web driver instances to test on the local pc. 
Assumes that Chrome * is present and installed in the default location, and that the webdriver.chrome.driver system * property has been set, and is pointing to a version of the driver downloaded from * http://chromedriver.storage.googleapis.com/index.html */ public class LocalThreadWebDriverMapImpl implements ThreadWebDriverMap { private static final Logger LOGGER = LoggerFactory.getLogger(LocalThreadWebDriverMapImpl.class); private static final SystemPropertyUtils SYSTEM_PROPERTY_UTILS = new SystemPropertyUtilsImpl(); private static final WebDriverFactory WEB_DRIVER_FACTORY = new WebDriverFactoryImpl(); /** * The mapping between thread ids and the feature state objects that they use for the tests */ private final Map<String, FeatureState> threadIdToCapMap = new HashMap<>(); /** * The mapping between thread ids and the webdrivers that they use for the tests */ private final Map<String, WebDriver> threadIdToDriverMap = new HashMap<>(); /** * The index of the Url we are going to be testing */ private int currentUrl; /** * The index of the data set we are going to be testing */ private int currentDataset; /** * The list of URLs associated with the application we are testing */ private List<UrlMapping> originalApplicationUrls; /** * The directory that holds reports and other test script outputs */ private String reportDirectory; /** * The values that can be input into the app */ private Map<Integer, Map<String, String>> originalDataSets; /** * A list of temp folders to delete once the test is finished */ private List<File> tempFolders; /** * The port for the proxy */ private List<ProxyDetails<?>> proxies; @Override public void initialise( @NotNull final List<DesiredCapabilities> desiredCapabilities, @NotNull final List<UrlMapping> applicationUrls, @NotNull final Map<Integer, Map<String, String>> datasets, @NotNull final String myReportDirectory, @NotNull final List<File> myTempFolders, @NotNull final List<ProxyDetails<?>> myProxies) { checkNotNull(desiredCapabilities); 
checkNotNull(applicationUrls); checkNotNull(datasets); checkNotNull(myReportDirectory); checkNotNull(myTempFolders); checkNotNull(myProxies); originalApplicationUrls = new ArrayList<>(applicationUrls); originalDataSets = new HashMap<>(datasets); reportDirectory = myReportDirectory; tempFolders = new ArrayList<>(myTempFolders); proxies = new ArrayList<>(myProxies); } @NotNull @Override public synchronized FeatureState getDesiredCapabilitiesForThread(@NotNull final String name) { if (threadIdToCapMap.containsKey(name)) { return threadIdToCapMap.get(name); } /* We have allocated our available configurations */ final int urlCount = Math.max(originalApplicationUrls.size(), 1); if (currentUrl >= urlCount) { throw new ConfigurationException("Configuration pool has been exhausted! " + currentUrl + " is greater than or equal to " + urlCount); } /* Get the details that the requesting thread will need */ final UrlMapping url = originalApplicationUrls.isEmpty() ? null : originalApplicationUrls.get(currentUrl); final Map<String, String> dataSet = originalDataSets.containsKey(currentDataset) ? 
new HashMap<>(originalDataSets.get(currentDataset)) : new HashMap<>(); /* Tick over to the next url when all the capabilities have been consumed */ ++currentDataset; if (currentDataset >= getMaxDataSets()) { currentDataset = 0; ++currentUrl; } final FeatureState featureState = new FeatureStateImpl( url, dataSet, reportDirectory, proxies); threadIdToCapMap.put(name, featureState); return featureState; } @NotNull @Override public synchronized WebDriver getWebDriverForThread(@NotNull final String name, final boolean createIfMissing) { checkArgument(StringUtils.isNotEmpty(name)); if (threadIdToDriverMap.containsKey(name)) { return threadIdToDriverMap.get(name); } if (createIfMissing) { LOGGER.info("WEBAPPTESTER-INFO-0006: Creating WebDriver"); final WebDriver webDriver = WEB_DRIVER_FACTORY.createWebDriver(proxies, tempFolders); threadIdToDriverMap.put(name, webDriver); return webDriver; } throw new DriverException("Could not find or create web driver"); } @Override public synchronized void clearWebDriverForThread(@NotNull final String name, final boolean quitDriver) { checkArgument(StringUtils.isNotEmpty(name)); if (threadIdToDriverMap.containsKey(name)) { if (quitDriver) { LOGGER.info("WEBAPPTESTER-INFO-0007: Quitting WebDriver"); threadIdToDriverMap.get(name).quit(); } threadIdToDriverMap.remove(name); } } @Override public synchronized int getNumberCapabilities() { return Math.max(originalApplicationUrls.size(), 1) * Math.max(getMaxDataSets(), 1); } @Override public List<File> getTempFolders() { return tempFolders; } private Integer getMaxDataSets() { try { final String maxDataSets = SYSTEM_PROPERTY_UTILS.getProperty(Constants.NUMBER_DATA_SETS_SYSTEM_PROPERTY); if (StringUtils.isNotBlank(maxDataSets)) { final Integer maxDataSetsNumber = Integer.parseInt( SYSTEM_PROPERTY_UTILS.getProperty(Constants.NUMBER_DATA_SETS_SYSTEM_PROPERTY)); return Math.min(originalDataSets.size(), maxDataSetsNumber); } } catch (final NumberFormatException ignored) { /* Input was not a 
number, so ignore it */ } return originalDataSets.size(); } @Override public synchronized void shutdown() { for (final WebDriver webdriver : threadIdToDriverMap.values()) { try { if (!WEB_DRIVER_FACTORY.leaveWindowsOpen()) { webdriver.quit(); } } catch (final Exception ignored) { // do nothing and continue closing the other webdrivers } } /* Clear the map */ threadIdToDriverMap.clear(); threadIdToCapMap.clear(); /* Attempt to delete all the temp folders */ getTempFolders().forEach(e -> Try.run(() -> FileUtils.deleteDirectory(e))); /* Reset the list of available configurations */ currentUrl = 0; } @Override public synchronized void shutdown(@NotNull final String name) { checkArgument(StringUtils.isNotBlank(name)); this.clearWebDriverForThread(name, !WEB_DRIVER_FACTORY.leaveWindowsOpen()); } }
Added some more parameter validation
src/main/java/au/com/agic/apptesting/utils/impl/LocalThreadWebDriverMapImpl.java
Added some more parameter validation
Java
mit
3d2ca5e27b861055092ab5c834be63a48063435f
0
KTH/camel-ladok3
/* * MIT License * * Copyright (c) 2017 Kungliga Tekniska högskolan * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package se.kth.infosys.smx.ladok3; public class Ladok3Message { public final class Header { public static final String EntryId = "ladok3AtomEntryId"; public static final String EntryUpdated = "ladok3AtomEntryUpdated"; public static final String Feed = "ladok3AtomFeed"; public static final String EventType = "ladok3EventType"; public static final String EventId = "ladok3EventId"; public static final String KeyType = "ladok3KeyType"; public static final String KeyValue = "ladok3KeyValue"; public static final String Service = "ladok3Service"; public static final String Operation = "ladok3ServiceOperation"; public static final String IsLastFeed = "ladok3IsLastFeed"; public static final String MessageType = "ladok3MessageType"; public static final String SequenceNumber = "ladok3MessageSequenceNumber"; } public final class MessageType { public static final String Start = "ladok3FeedStart"; public static final String Event = "ladok3Event"; public static final String Done = "ladok3FeedDone"; } }
camel-ladok3-component/src/main/java/se/kth/infosys/smx/ladok3/Ladok3Message.java
/* * MIT License * * Copyright (c) 2017 Kungliga Tekniska högskolan * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package se.kth.infosys.smx.ladok3; public class Ladok3Message { public final class Header { public static final String EntryId = "ladok3AtomEntryId"; public static final String EntryUpdated = "ladok3EventUpdated"; public static final String Feed = "ladok3AtomFeed"; public static final String EventType = "ladok3EventType"; public static final String EventId = "ladok3EventId"; public static final String KeyType = "ladok3KeyType"; public static final String KeyValue = "ladok3KeyValue"; public static final String Service = "ladok3Service"; public static final String Operation = "ladok3ServiceOperation"; public static final String IsLastFeed = "ladok3IsLastFeed"; public static final String MessageType = "ladok3MessageType"; public static final String SequenceNumber = "ladok3MessageSequenceNumber"; } public final class MessageType { public static final String Start = "ladok3FeedStart"; public static final String Event = "ladok3Event"; public static final String Done = "ladok3FeedDone"; } }
better name of header
camel-ladok3-component/src/main/java/se/kth/infosys/smx/ladok3/Ladok3Message.java
better name of header
Java
mit
f9c4cdfe7f932dc32f5fd3b36649ca07f5f28a93
0
IanEdington/ud405,IanEdington/ud405,IanEdington/ud405
package com.udacity.gamedev.reciprocatingmotion; import com.badlogic.gdx.ApplicationAdapter; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType; import com.badlogic.gdx.math.MathUtils; import com.badlogic.gdx.utils.TimeUtils; import com.badlogic.gdx.utils.viewport.ExtendViewport; /** * In this exercise we'll make a circle move back and forth smoothly. We'll pick a period and and * amplitude, the set the circle x position to the center of the screen plus the amplitude times the * sin of 2Pi the elapsed time divided by the period. */ public class ReciprocatingMotion extends ApplicationAdapter { private static final float WORLD_SIZE = 480; private static final float CIRCLE_RADIUS = WORLD_SIZE / 20; private static final float MOVEMENT_DISTANCE = WORLD_SIZE / 4; // Define a constant that fixes how long a cycle of the animation should take in seconds private static final float PERIOD = 10; ShapeRenderer renderer; ExtendViewport viewport; // Create a long to hold onto ApplicationAdapter creation time private static final long START_TIME = TimeUtils.nanoTime(); @Override public void create() { renderer = new ShapeRenderer(); viewport = new ExtendViewport(WORLD_SIZE, WORLD_SIZE); // Save current value of TimeUtils.nanoTime() } @Override public void resize(int width, int height) { viewport.update(width, height, true); } @Override public void dispose() { renderer.dispose(); } @Override public void render() { viewport.apply(); Gdx.gl.glClearColor(0, 0, 0, 1); Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT); renderer.setProjectionMatrix(viewport.getCamera().combined); renderer.begin(ShapeType.Filled); // Since we're using an extend viewport, the world might be bigger than we expect float worldCenterX = viewport.getWorldWidth() / 2; float worldCenterY = viewport.getWorldHeight() / 2; // Figure out how long it's been since the animation started using 
TimeUtils.nanoTime() float deltaNano = (TimeUtils.nanoTime() - START_TIME); // Use MathUtils.nanoToSec to figure out how many seconds the animation has been running float deltaSec = MathUtils.nanoToSec * deltaNano; // Figure out how many cycles have elapsed since the animation started running int cyclesSinceStart = (int)(deltaSec / PERIOD); // Figure out where in the cycle we are float cyclePos = (deltaSec / PERIOD) * MathUtils.PI2; // Use MathUtils.sin() to set the x position of the circle float x = MathUtils.sin(cyclePos) * MOVEMENT_DISTANCE + worldCenterX; //float x = worldCenterX; float y = worldCenterY; renderer.circle(x, y, CIRCLE_RADIUS); renderer.end(); } }
1.5.02-Exercise-ReciprocatingMotion/core/src/com/udacity/gamedev/reciprocatingmotion/ReciprocatingMotion.java
package com.udacity.gamedev.reciprocatingmotion; import com.badlogic.gdx.ApplicationAdapter; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType; import com.badlogic.gdx.utils.viewport.ExtendViewport; /** * TODO: Start Here * * In this exercise we'll make a circle move back and forth smoothly. We'll pick a period and and * amplitude, the set the circle x position to the center of the screen plus the amplitude times the * sin of 2Pi the elapsed time divided by the period. */ public class ReciprocatingMotion extends ApplicationAdapter { private static final float WORLD_SIZE = 480; private static final float CIRCLE_RADIUS = WORLD_SIZE / 20; private static final float MOVEMENT_DISTANCE = WORLD_SIZE / 4; // TODO: Define a constant that fixes how long a cycle of the animation should take in seconds ShapeRenderer renderer; ExtendViewport viewport; // TODO: Create a long to hold onto ApplicationAdapter creation time @Override public void create() { renderer = new ShapeRenderer(); viewport = new ExtendViewport(WORLD_SIZE, WORLD_SIZE); // TODO: Save current value of TimeUtils.nanoTime() } @Override public void resize(int width, int height) { viewport.update(width, height, true); } @Override public void dispose() { renderer.dispose(); } @Override public void render() { viewport.apply(); Gdx.gl.glClearColor(0, 0, 0, 1); Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT); renderer.setProjectionMatrix(viewport.getCamera().combined); renderer.begin(ShapeType.Filled); // Since we're using an extend viewport, the world might be bigger than we expect float worldCenterX = viewport.getWorldWidth() / 2; float worldCenterY = viewport.getWorldHeight() / 2; // TODO: Figure out how long it's been since the animation started using TimeUtils.nanoTime() // TODO: Use MathUtils.nanoToSec to figure out how many seconds the animation has been running // TODO: Figure out 
how many cycles have elapsed since the animation started running // TODO: Figure out where in the cycle we are // TODO: Use MathUtils.sin() to set the x position of the circle float x = worldCenterX; float y = worldCenterY; renderer.circle(x, y, CIRCLE_RADIUS); renderer.end(); } }
Working with time
1.5.02-Exercise-ReciprocatingMotion/core/src/com/udacity/gamedev/reciprocatingmotion/ReciprocatingMotion.java
Working with time
Java
mit
c2bebf2aaa66039e4cdbd1f88de30efe1cef8058
0
milesmarchant/FRCBitBucketBase
package org.frcbitbucketbase.control.profile; import java.util.function.LongFunction; import org.frcbitbucketbase.control.MovementVector; public abstract class Spline{ LongFunction<MovementVector>[] functions; long startTime; long endTime; public Spline(LongFunction<MovementVector>[] functions, long startTime, long endTime){ this.functions = functions; this.endTime = endTime; } public MovementVector calculate(int order, long time){ return functions[order-1].apply(time); } public long getStartTime(){ return startTime; } public long getEndTime(){ return endTime; } }
org/frcbitbucketbase/control/profile/Spline.java
package org.frcbitbucketbase.control.profile; import java.util.function.LongFunction; public abstract class Spline{ LongFunction<Double>[] functions; long startTime; long endTime; public Spline(LongFunction<Double>[] functions, long startTime, long endTime){ this.functions = functions; this.endTime = endTime; } public double calculate(int order, long time){ return functions[order-1].apply(time); } public long getStartTime(){ return startTime; } public long getEndTime(){ return endTime; } }
Switched the return type of Spline's functions array; this solves the problem of getting different values from the spline.
org/frcbitbucketbase/control/profile/Spline.java
Switched the return type of Spline's functions array; this solves the problem of getting different values from the spline.